From 653b0ce5b89e8615d31bca3b15b60aac96c46e11 Mon Sep 17 00:00:00 2001 From: praveenramoorthy <62758226+praveenramoorthy@users.noreply.github.com> Date: Tue, 1 Feb 2022 19:02:18 +0530 Subject: [PATCH 01/17] Changes to support scaled vrf/network configs (#131) * Changes to support scaled vrf/network configs * Changes to support scaled vrf/network configs * Addressed review comments * Addressed review comments --- plugins/module_utils/network/dcnm/dcnm.py | 76 +++++++++++++++++++ plugins/modules/dcnm_network.py | 16 +--- plugins/modules/dcnm_vrf.py | 16 +--- .../dcnm_network/tests/dcnm/scale.yaml | 75 ++++++++++++++++++ .../targets/dcnm_vrf/tests/dcnm/scale.yaml | 75 ++++++++++++++++++ tests/unit/modules/dcnm/test_dcnm_vrf.py | 76 +++++++++++++------ 6 files changed, 282 insertions(+), 52 deletions(-) create mode 100644 tests/integration/targets/dcnm_network/tests/dcnm/scale.yaml create mode 100644 tests/integration/targets/dcnm_vrf/tests/dcnm/scale.yaml diff --git a/plugins/module_utils/network/dcnm/dcnm.py b/plugins/module_utils/network/dcnm/dcnm.py index 1ddfa460d..892994723 100644 --- a/plugins/module_utils/network/dcnm/dcnm.py +++ b/plugins/module_utils/network/dcnm/dcnm.py @@ -18,6 +18,7 @@ import json import time import re +import sys from ansible.module_utils.common import validation from ansible.module_utils.connection import Connection @@ -392,3 +393,78 @@ def dcnm_version_supported(module): module.fail_json(msg=msg) return supported + + +def parse_response(response): + + if response.get('ERROR') == 'Not Found' and response['RETURN_CODE'] == 404: + return True, False + if response['RETURN_CODE'] != 200 or response['MESSAGE'] != 'OK': + return False, True + return False, False + + +def dcnm_get_url(module, fabric, path, items, module_name): + """ + Query DCNM/NDFC and return query values. + Some queries like network/vrf queries send thier names + as part of URL. 
This method sends multiple queries and returns + a consolidated response if the url exceeds 6144 characters. + + Parameters: + module: String representing the module + fabric: String representing the fabric + path: String representing the path to query + items: String representing query items + module_name: String representing the name of calling module + + Returns: + dict: Response DATA from DCNM/NDFC + """ + + method = 'GET' + send_count = 1 + + # NDFC/DCNM12 can handle urls upto 6144 characters. + # The size here represents the total size of all item names. + # The number 5900 has been arrived after making some room + # for query path(url) + if sys.getsizeof(items) > 5900: + if (sys.getsizeof(items) % 5900) == 0: + send_count = sys.getsizeof(items)/5900 + else: + send_count = sys.getsizeof(items)//5900 + 1 + + itemlist = items.split(',') + + iter = 0 + while iter < send_count: + if send_count == 1: + url = path.format(fabric, items) + elif (iter != (send_count - 1)): + itemstr = ','.join(itemlist[(iter*(len(itemlist)//send_count)):((iter+1)*(len(itemlist)//send_count))]) + url = path.format(fabric, itemstr) + else: + itemstr = ','.join(itemlist[iter*(len(itemlist)//send_count):]) + url = path.format(fabric, itemstr) + + att_objects = dcnm_send(module, method, url) + + missing_fabric, not_ok = parse_response(att_objects) + + if missing_fabric or not_ok: + msg1 = "Fabric {0} not present on DCNM".format(fabric) + msg2 = "Unable to find " \ + "{0}: {1} under fabric: {2}".format(module_name, items[:-1], fabric) + + module.fail_json(msg=msg1 if missing_fabric else msg2) + return + + if iter == 0: + attach_objects = att_objects + else: + attach_objects['DATA'].extend(att_objects['DATA']) + + iter += 1 + + return attach_objects diff --git a/plugins/modules/dcnm_network.py b/plugins/modules/dcnm_network.py index a20bb7051..fa0ba6aeb 100644 --- a/plugins/modules/dcnm_network.py +++ b/plugins/modules/dcnm_network.py @@ -379,7 +379,7 @@ import re from 
ansible_collections.cisco.dcnm.plugins.module_utils.network.dcnm.dcnm import get_fabric_inventory_details, \ dcnm_send, validate_list_of_dicts, dcnm_get_ip_addr_info, get_ip_sn_dict, get_fabric_details, \ - get_ip_sn_fabric_dict, dcnm_version_supported + get_ip_sn_fabric_dict, dcnm_version_supported, dcnm_get_url from ansible.module_utils.connection import Connection from ansible.module_utils.basic import AnsibleModule @@ -954,19 +954,7 @@ def get_have(self): if not curr_networks: return - path = self.paths["GET_NET_ATTACH"].format(self.fabric, ','.join(curr_networks)) - - net_attach_objects = dcnm_send(self.module, method, path) - - missing_fabric, not_ok = self.handle_response(net_attach_objects, 'query_dcnm') - - if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Unable to find attachments for " \ - "networks: {} under fabric: {}".format(','.join(curr_networks), self.fabric) - - self.module.fail_json(msg=msg1 if missing_fabric else msg2) - return + net_attach_objects = dcnm_get_url(self.module, self.fabric, self.paths["GET_NET_ATTACH"], ','.join(curr_networks), "networks") if not net_attach_objects['DATA']: return diff --git a/plugins/modules/dcnm_vrf.py b/plugins/modules/dcnm_vrf.py index 7f93b4dbd..9b66b9db4 100644 --- a/plugins/modules/dcnm_vrf.py +++ b/plugins/modules/dcnm_vrf.py @@ -346,7 +346,7 @@ from ansible_collections.cisco.dcnm.plugins.module_utils.network.dcnm.dcnm import \ get_fabric_inventory_details, dcnm_send, validate_list_of_dicts, \ dcnm_get_ip_addr_info, get_ip_sn_dict, get_fabric_details, get_ip_sn_fabric_dict, \ - dcnm_version_supported + dcnm_version_supported, dcnm_get_url from ansible.module_utils.basic import AnsibleModule @@ -667,19 +667,7 @@ def get_have(self): for vrf in vrf_objects['DATA']: curr_vrfs += vrf['vrfName'] + ',' - path = self.paths["GET_VRF_ATTACH"].format(self.fabric, curr_vrfs[:-1]) - - vrf_attach_objects = dcnm_send(self.module, method, path) - - missing_fabric, not_ok 
= self.handle_response(vrf_objects, 'query_dcnm') - - if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Unable to find attachments for " \ - "vrfs: {} under fabric: {}".format(curr_vrfs[:-1], self.fabric) - - self.module.fail_json(msg=msg1 if missing_fabric else msg2) - return + vrf_attach_objects = dcnm_get_url(self.module, self.fabric, self.paths["GET_VRF_ATTACH"], curr_vrfs[:-1], "vrfs") if not vrf_attach_objects['DATA']: return diff --git a/tests/integration/targets/dcnm_network/tests/dcnm/scale.yaml b/tests/integration/targets/dcnm_network/tests/dcnm/scale.yaml new file mode 100644 index 000000000..1aaa7d121 --- /dev/null +++ b/tests/integration/targets/dcnm_network/tests/dcnm/scale.yaml @@ -0,0 +1,75 @@ + +############################################## +## SETUP ## +############################################## + +- set_fact: + rest_path: "/rest/control/fabrics/{{ ansible_it_fabric }}" + when: controller_version == "11" + +- set_fact: + rest_path: "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/fabrics/{{ ansible_it_fabric }}" + when: controller_version >= "12" + +- name: SCALE - Verify if fabric is deployed. 
+ cisco.dcnm.dcnm_rest: + method: GET + path: "{{ rest_path }}" + register: result + +- assert: + that: + - 'result.response.DATA != None' + +- name: SCALE - Clean up any existing networks + cisco.dcnm.dcnm_network: + fabric: "{{ ansible_it_fabric }}" + state: deleted + +- name: SCALE - sleep for 40 seconds for DCNM to completely update the state + wait_for: + timeout: 40 + +- name: Dummy set fact for leaf_attach_list + set_fact: + leaf_net_attach: [] + +- name: Build list of Networks to be deployed + set_fact: + nets_list: "{{ nets_list|default([]) + [{ 'net_name': 'TEST_NET%03d' | format(item), 'vrf_name': 'Tenant-1', 'deploy': 'no', 'net_id': (item | int + 50000) | int, 'vlan_id': (item | int + 2000) | int, 'attach': leaf_net_attach }] }}" + loop: '{{ range(0, 800) | list }}' + +- name: Push all Networks to DCNM + cisco.dcnm.dcnm_network: + fabric: '{{ ansible_it_fabric }}' + state: merged + config: '{{ nets_list }}' + register: result + +- name: SCALE - Clean up existing networks + cisco.dcnm.dcnm_network: &conf + fabric: "{{ ansible_it_fabric }}" + state: deleted + +- name: SCALE - sleep for 40 seconds for DCNM to completely update the state + wait_for: + timeout: 40 + +- name: SCALE - conf - Idempotence + cisco.dcnm.dcnm_network: *conf + register: result + +- assert: + that: + - 'result.changed == false' + - 'result.response|length == 0' + - 'result.diff|length == 0' + +################################################ +#### CLEAN-UP ## +################################################ + +- name: SCALE - Clean up any existing networks + cisco.dcnm.dcnm_network: + fabric: "{{ ansible_it_fabric }}" + state: deleted diff --git a/tests/integration/targets/dcnm_vrf/tests/dcnm/scale.yaml b/tests/integration/targets/dcnm_vrf/tests/dcnm/scale.yaml new file mode 100644 index 000000000..262e5b640 --- /dev/null +++ b/tests/integration/targets/dcnm_vrf/tests/dcnm/scale.yaml @@ -0,0 +1,75 @@ + +############################################## +## SETUP ## 
+############################################## + +- set_fact: + rest_path: "/rest/control/fabrics/{{ ansible_it_fabric }}" + when: controller_version == "11" + +- set_fact: + rest_path: "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/fabrics/{{ ansible_it_fabric }}" + when: controller_version >= "12" + +- name: SCALE - Verify if fabric is deployed. + cisco.dcnm.dcnm_rest: + method: GET + path: "{{ rest_path }}" + register: result + +- assert: + that: + - 'result.response.DATA != None' + +- name: SCALE - Clean up any existing vrfs + cisco.dcnm.dcnm_vrf: + fabric: "{{ ansible_it_fabric }}" + state: deleted + +- name: SCALE - sleep for 40 seconds for DCNM to completely update the state + wait_for: + timeout: 40 + +- name: Dummy set fact for leaf_attach_list + set_fact: + leaf_vrf_attach: [] + +- name: Build list of VRFs to be deployed + set_fact: + vrfs_list: "{{ vrfs_list|default([]) + [{ 'vrf_name': 'TEST_VRF%03d' | format(item), 'deploy': 'no', 'vrf_id': (item | int + 50000) | int, 'vlan_id': (item | int + 2000) | int, 'attach': leaf_vrf_attach }] }}" + loop: '{{ range(0, 800) | list }}' + +- name: Push all VRFs to DCNM + cisco.dcnm.dcnm_vrf: + fabric: '{{ ansible_it_fabric }}' + state: merged + config: '{{ vrfs_list }}' + register: result + +- name: SCALE - Clean up existing vrfs + cisco.dcnm.dcnm_vrf: &conf + fabric: "{{ ansible_it_fabric }}" + state: deleted + +- name: SCALE - sleep for 40 seconds for DCNM to completely update the state + wait_for: + timeout: 40 + +- name: SCALE - conf - Idempotence + cisco.dcnm.dcnm_vrf: *conf + register: result + +- assert: + that: + - 'result.changed == false' + - 'result.response|length == 0' + - 'result.diff|length == 0' + +################################################ +#### CLEAN-UP ## +################################################ + +- name: SCALE - Clean up any existing vrfs + cisco.dcnm.dcnm_vrf: + fabric: "{{ ansible_it_fabric }}" + state: deleted diff --git a/tests/unit/modules/dcnm/test_dcnm_vrf.py 
b/tests/unit/modules/dcnm/test_dcnm_vrf.py index dd0271fb6..dd6fb603a 100644 --- a/tests/unit/modules/dcnm/test_dcnm_vrf.py +++ b/tests/unit/modules/dcnm/test_dcnm_vrf.py @@ -110,12 +110,16 @@ def setUp(self): self.mock_dcnm_version_supported = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_vrf.dcnm_version_supported') self.run_dcnm_version_supported = self.mock_dcnm_version_supported.start() + self.mock_dcnm_get_url = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_vrf.dcnm_get_url') + self.run_dcnm_get_url = self.mock_dcnm_get_url.start() + def tearDown(self): super(TestDcnmVrfModule, self).tearDown() self.mock_dcnm_send.stop() self.mock_dcnm_ip_sn.stop() self.mock_dcnm_fabric_details.stop() self.mock_dcnm_version_supported.stop() + self.mock_dcnm_get_url.stop() def load_fixtures(self, response=None, device=''): @@ -133,7 +137,8 @@ def load_fixtures(self, response=None, device=''): elif '_check_mode' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, self.mock_vrf_attach_get_ext_object_merge_att2_only] @@ -157,25 +162,29 @@ def load_fixtures(self, response=None, device=''): elif '_merged_duplicate' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, self.mock_vrf_attach_get_ext_object_merge_att2_only] elif '_merged_lite_duplicate' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object2, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] + 
self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, self.mock_vrf_attach_get_ext_object_merge_att4_only] elif '_merged_with_incorrect' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, self.mock_vrf_attach_get_ext_object_merge_att2_only] elif '_merged_with_update' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object2, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, self.mock_vrf_attach_get_ext_object_merge_att4_only, self.blank_data, self.attach_success_resp, @@ -183,7 +192,8 @@ def load_fixtures(self, response=None, device=''): elif '_merged_lite_update_vlan' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, self.mock_vrf_attach_get_ext_object_merge_att2_only, self.blank_data, self.attach_success_resp, @@ -191,7 +201,8 @@ def load_fixtures(self, response=None, device=''): elif '_merged_lite_update' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, self.mock_vrf_attach_get_ext_object_merge_att2_only, self.attach_success_resp, 
self.deploy_success_resp] @@ -216,7 +227,8 @@ def load_fixtures(self, response=None, device=''): elif 'replace_with_no_atch' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, self.mock_vrf_attach_get_ext_object_merge_att2_only, self.attach_success_resp, self.deploy_success_resp, @@ -224,7 +236,8 @@ def load_fixtures(self, response=None, device=''): elif 'replace_lite_no_atch' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object2, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, self.mock_vrf_attach_get_ext_object_merge_att4_only, self.attach_success_resp, self.deploy_success_resp, @@ -233,7 +246,8 @@ def load_fixtures(self, response=None, device=''): elif 'replace_with_changes' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, self.mock_vrf_attach_get_ext_object_merge_att2_only, self.attach_success_resp, self.deploy_success_resp, @@ -241,7 +255,8 @@ def load_fixtures(self, response=None, device=''): elif 'replace_lite_changes' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object2, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, 
self.mock_vrf_attach_get_ext_object_merge_att4_only, self.attach_success_resp, self.deploy_success_resp, @@ -249,13 +264,15 @@ def load_fixtures(self, response=None, device=''): elif 'replace_without_changes' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, self.mock_vrf_attach_get_ext_object_merge_att2_only] elif 'replace_lite_without_changes' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object2, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, self.mock_vrf_attach_get_ext_object_merge_att4_only] @@ -269,7 +286,8 @@ def load_fixtures(self, response=None, device=''): elif 'lite_override_with_deletions' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object2, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, self.mock_vrf_attach_get_ext_object_merge_att4_only, self.attach_success_resp, self.deploy_success_resp, @@ -280,7 +298,8 @@ def load_fixtures(self, response=None, device=''): elif 'override_with_deletions' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_ov_att1_only, self.mock_vrf_attach_get_ext_object_ov_att2_only, self.attach_success_resp, self.deploy_success_resp, 
@@ -291,19 +310,22 @@ def load_fixtures(self, response=None, device=''): elif 'override_without_changes' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, self.mock_vrf_attach_get_ext_object_merge_att2_only] elif 'override_no_changes_lite' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object2, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att3_only, self.mock_vrf_attach_get_ext_object_merge_att4_only,] elif 'delete_std' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_dcnm_att1_only, self.mock_vrf_attach_get_ext_object_dcnm_att2_only, self.attach_success_resp, self.deploy_success_resp, @@ -313,7 +335,8 @@ def load_fixtures(self, response=None, device=''): elif 'delete_std_lite' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object2, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_dcnm_att1_only, self.mock_vrf_attach_get_ext_object_dcnm_att4_only, self.attach_success_resp, self.deploy_success_resp, @@ -324,7 +347,8 @@ def load_fixtures(self, response=None, device=''): elif 'delete_failure' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, 
self.mock_vrf_attach_object, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_dcnm_att1_only, self.mock_vrf_attach_get_ext_object_dcnm_att2_only, self.attach_success_resp, self.deploy_success_resp, @@ -339,7 +363,8 @@ def load_fixtures(self, response=None, device=''): obj1['DATA'][0].update({'vrfName': 'test_vrf_dcnm'}) obj2['DATA'][0].update({'vrfName': 'test_vrf_dcnm'}) - self.run_dcnm_send.side_effect = [self.mock_vrf_object_dcnm_only, self.mock_vrf_attach_object_dcnm_only, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object_dcnm_only] + self.run_dcnm_send.side_effect = [self.mock_vrf_object_dcnm_only, self.mock_vrf_attach_get_ext_object_dcnm_att1_only, self.mock_vrf_attach_get_ext_object_dcnm_att2_only, self.attach_success_resp, self.deploy_success_resp, obj1, @@ -348,7 +373,8 @@ def load_fixtures(self, response=None, device=''): elif 'query' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, self.mock_vrf_attach_get_ext_object_merge_att2_only, self.mock_vrf_object, @@ -357,7 +383,8 @@ def load_fixtures(self, response=None, device=''): elif 'query_vrf_lite' in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object2, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, self.mock_vrf_attach_get_ext_object_merge_att4_only, self.mock_vrf_object, @@ -366,7 +393,8 @@ def load_fixtures(self, response=None, device=''): elif 'query_vrf_lite_without_config' in self._testMethodName: self.init_data() - 
self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object2, + self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] + self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_get_ext_object_merge_att1_only, self.mock_vrf_attach_get_ext_object_merge_att4_only, self.mock_vrf_object, From d977d53cde59373b46dcd8e9e19b1db117307ce4 Mon Sep 17 00:00:00 2001 From: Rostyslav Davydenko <35100270+rost-d@users.noreply.github.com> Date: Thu, 3 Mar 2022 13:35:55 +0000 Subject: [PATCH 02/17] Add Python template type support (#135) * Add Python template type support * Handle template update to honor existing content type * Handle the default value of type value in nested list of configs --- plugins/modules/dcnm_template.py | 28 +++++++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/plugins/modules/dcnm_template.py b/plugins/modules/dcnm_template.py index f08f86e0a..98099e28d 100644 --- a/plugins/modules/dcnm_template.py +++ b/plugins/modules/dcnm_template.py @@ -66,6 +66,15 @@ - Multiple line configuration snip that can be used to associate to devices as policy type: str + + type: + description: + - Type of the template content either CLI or Python + type: str + choices: + - cli + - python + default: cli """ EXAMPLES = """ @@ -236,6 +245,7 @@ def dcnm_template_validate_input(self): description=dict(required=False, type="str", default=""), tags=dict(required=False, type="str", default=""), content=dict(required=True, type="str"), + type=dict(required=False, type="str", default="cli"), ) elif self.module.params["state"] == "deleted": template_spec = dict(name=dict(required=True, type="str")) @@ -280,6 +290,8 @@ def dcnm_template_get_template_payload(self, ditem): std_cont = std_cont.replace("__TEMPLATE_NAME", ditem["name"]) std_cont = std_cont.replace("__DESCRIPTION", ditem["description"]) std_cont = std_cont.replace("__TAGS", ditem["tags"]) + if ditem["type"] == "python": + std_cont = 
std_cont.replace("TEMPLATE_CLI", "PYTHON") final_cont = std_cont + ditem["content"] + "##" @@ -327,12 +339,20 @@ def dcnm_template_compare_template(self, template): else: tags = match_pb["tags"] + if match_pb.get("type", None) is None: + # Type is not included in config. So take it from have + type = have["contentType"].lower() + update_content = True + else: + type = match_pb["type"] + if update_content is True: template["content"] = self.dcnm_template_build_content( match_pb["content"], template["template_name"], desc, tags, + type, ) # Check the content # Before doing that remove 'imports = ;\n' from have. We do not have it in want. @@ -635,13 +655,15 @@ def dcnm_template_send_message_to_dcnm(self): self.result["changed"] = delete_flag or create_flag - def dcnm_template_build_content(self, content, name, desc, tags): + def dcnm_template_build_content(self, content, name, desc, tags, type): std_cont = "##template properties\nname = __TEMPLATE_NAME;\ndescription = __DESCRIPTION;\ntags = __TAGS;\nuserDefined = true;\nsupportedPlatforms = All;\ntemplateType = POLICY;\ntemplateSubType = DEVICE;\ncontentType = TEMPLATE_CLI;\nimplements = implements;\ndependencies = ;\npublished = false;\n##\n##template content\n" std_cont = std_cont.replace("__TEMPLATE_NAME", name) std_cont = std_cont.replace("__DESCRIPTION", desc) std_cont = std_cont.replace("__TAGS", tags) + if type == "python": + std_cont = std_cont.replace("TEMPLATE_CLI", "PYTHON") final_cont = std_cont + content + "##" return final_cont @@ -655,6 +677,10 @@ def dcnm_template_copy_config(self): return for cfg in self.config: + # Default value is not filled automatically in dicts nested in list + # Therefore handle defaulting type field of config list here + if "type" not in cfg.keys(): + cfg["type"] = "cli" self.pb_input.append(copy.deepcopy(cfg)) From b57b824db3c277572ac7a4b1dfcd251d5d05d326 Mon Sep 17 00:00:00 2001 From: Mike Wiebe Date: Wed, 16 Mar 2022 09:58:49 -0400 Subject: [PATCH 03/17] Enable ansible-test 
github action and fix ansible-test sanity failures (#133) * Enable test branch Temporary enablement of CI tests on `ansible-test-fixes` branch * Enable new ansible core versions Enable new versions of ansible core for `Build DCNM collection` job. * Update python version to 3.8 * Enable Ansible Core Versions Enable new Ansible Core versions for `Run DCNM Unit Tests` job * Enable ansibe-test sanity * Update main.yml * Add options to ansible-test command * Run ansible-sanity in docker * dcnm_interface fixes * dcnm_inventory fixes * Fix additional ansible-test failures in plugins * Additional ansible-test fixes * Additional ansible-test fixes and skip some tests * Fix doc issues in dcnm_service_policy * Fix yamllint issues and re-nable yamllint tests * Remove duplicate yaml key * Enable ignore files and run all tests --- .github/workflows/main.yml | 48 +- README.md | 2 +- meta/runtime.yml | 2 +- plugins/action/dcnm_interface.py | 6 +- plugins/action/dcnm_inventory.py | 4 +- plugins/action/dcnm_vrf.py | 19 +- plugins/httpapi/dcnm.py | 231 +- plugins/module_utils/network/dcnm/dcnm.py | 248 +- plugins/modules/dcnm_interface.py | 2102 +++++---- plugins/modules/dcnm_inventory.py | 656 +-- plugins/modules/dcnm_network.py | 1706 ++++---- plugins/modules/dcnm_policy.py | 159 +- plugins/modules/dcnm_rest.py | 53 +- plugins/modules/dcnm_service_node.py | 362 +- plugins/modules/dcnm_service_policy.py | 329 +- plugins/modules/dcnm_service_route_peering.py | 1169 ++++-- plugins/modules/dcnm_template.py | 107 +- plugins/modules/dcnm_vrf.py | 1542 ++++--- tests/.gitignore | 1 + .../tests/dcnm/dcnm_delete_diff_options.yaml | 1 - .../tests/dcnm/dcnm_eth_delete.yaml | 2 +- .../tests/dcnm/dcnm_eth_merge.yaml | 3 +- .../tests/dcnm/dcnm_eth_override.yaml | 3 +- .../tests/dcnm/dcnm_eth_replace.yaml | 3 +- .../tests/dcnm/dcnm_intf_multi_switches.yaml | 3 +- .../dcnm/dcnm_intf_no_optional_elems.yaml | 3 +- .../tests/dcnm/dcnm_intf_query.yaml | 2 +- .../tests/dcnm/dcnm_lo_delete.yaml | 3 +- 
.../tests/dcnm/dcnm_lo_merge.yaml | 3 +- .../tests/dcnm/dcnm_lo_override.yaml | 3 +- .../tests/dcnm/dcnm_lo_replace.yaml | 3 +- .../tests/dcnm/dcnm_old_format_pb.yaml | 2 +- .../tests/dcnm/dcnm_pc_delete.yaml | 3 +- .../tests/dcnm/dcnm_pc_merge.yaml | 3 +- .../tests/dcnm/dcnm_pc_override.yaml | 3 +- .../tests/dcnm/dcnm_pc_replace.yaml | 3 +- .../tests/dcnm/dcnm_sub_delete.yaml | 3 +- .../tests/dcnm/dcnm_sub_merge.yaml | 3 +- .../tests/dcnm/dcnm_sub_override.yaml | 3 +- .../tests/dcnm/dcnm_sub_replace.yaml | 3 +- .../tests/dcnm/dcnm_vpc_delete.yaml | 3 +- .../tests/dcnm/dcnm_vpc_merge.yaml | 3 +- .../tests/dcnm/dcnm_vpc_override.yaml | 3 +- .../tests/dcnm/dcnm_vpc_replace.yaml | 3 +- .../dcnm_inventory/tests/dcnm/deleted.yaml | 2 +- .../dcnm_inventory/tests/dcnm/merged.yaml | 2 +- .../dcnm_inventory/tests/dcnm/overridden.yaml | 2 +- .../dcnm_inventory/tests/dcnm/query.yaml | 3 +- .../dcnm_network/tests/dcnm/deleted.yaml | 8 +- .../dcnm_network/tests/dcnm/merged.yaml | 3 +- .../dcnm_network/tests/dcnm/overridden.yaml | 2 +- .../dcnm_network/tests/dcnm/query.yaml | 2 +- .../dcnm_network/tests/dcnm/replaced.yaml | 2 +- .../dcnm_network/tests/dcnm/scale.yaml | 3 +- .../self-contained-tests/sm_dhcp_params.yaml | 3 +- .../targets/dcnm_policy/tasks/dcnm.yaml | 3 +- ...m_service_route_peering_adc_po_change.yaml | 3 +- .../dcnm_service_route_peering_delete.yaml | 2 +- ...nm_service_route_peering_fw_po_change.yaml | 3 +- .../dcnm_service_route_peering_merge.yaml | 3 +- ..._service_route_peering_merge_existing.yaml | 3 +- ...nm_service_route_peering_no_opt_elems.yaml | 3 +- .../dcnm_service_route_peering_no_state.yaml | 3 +- .../dcnm_service_route_peering_override.yaml | 3 +- .../dcnm_service_route_peering_query.yaml | 3 +- .../dcnm_service_route_peering_replace.yaml | 3 +- .../targets/dcnm_template/tasks/dcnm.yaml | 3 +- .../tests/dcnm/dcnm_template_delete.yaml | 3 +- .../tests/dcnm/dcnm_template_merge.yaml | 2 +- .../dcnm/dcnm_template_modify_properties.yaml | 3 +- 
.../tests/dcnm/dcnm_template_no_delete.yaml | 3 +- .../tests/dcnm/dcnm_template_query.yaml | 3 +- .../dcnm/dcnm_template_validation_fail.yaml | 3 +- .../tests/dcnm/dcnm_template_wrong_state.yaml | 2 +- .../targets/dcnm_vrf/tests/dcnm/scale.yaml | 3 +- .../prepare_dcnm_policy/tasks/main.yaml | 3 +- .../tasks/main.yaml | 3 +- .../prepare_dcnm_template/tasks/main.yaml | 3 +- tests/sanity/ignore-2.10.txt | 2 + tests/sanity/ignore-2.11.txt | 8 + tests/sanity/ignore-2.12.txt | 15 + tests/sanity/ignore-2.9.txt | 2 + tests/unit/modules/dcnm/dcnm_module.py | 58 +- tests/unit/modules/dcnm/test_dcnm_intf.py | 3738 ++++++++++------- .../unit/modules/dcnm/test_dcnm_inventory.py | 1229 +++--- tests/unit/modules/dcnm/test_dcnm_policy.py | 2055 +++++---- .../modules/dcnm/test_dcnm_service_node.py | 888 ++-- .../modules/dcnm/test_dcnm_service_policy.py | 2852 +++++++------ .../dcnm/test_dcnm_service_route_peering.py | 3634 +++++++++------- tests/unit/modules/dcnm/test_dcnm_template.py | 840 ++-- tests/unit/modules/dcnm/test_dcnm_vrf.py | 1506 ++++--- 91 files changed, 15075 insertions(+), 10666 deletions(-) create mode 100644 tests/.gitignore create mode 100644 tests/sanity/ignore-2.12.txt diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 635fa044a..3081d16a5 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -6,7 +6,7 @@ name: CI on: # Triggers the workflow on push or pull request events but only for the develop branch push: - branches: [ develop, main ] + branches: [ develop, main, ansible-test-fixes ] pull_request: branches: [ develop, main ] @@ -21,15 +21,15 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - ansible: [2.9.12, 2.10.0] + ansible: [2.9.27, 2.10.17, 2.11.8, 2.12.2] steps: - name: Check out code uses: actions/checkout@v2 - - name: Set up Python 3.7 + - name: Set up Python 3.8 uses: actions/setup-python@v1 with: - python-version: 3.7 + python-version: 3.8 - name: Install ansible-base (v${{ matrix.ansible }}) run: pip 
install https://github.com/ansible/ansible/archive/v${{ matrix.ansible }}.tar.gz --disable-pip-version-check @@ -43,6 +43,38 @@ jobs: name: collection path: .cache/collection-tarballs + + sanity: + name: Run ansible-sanity tests + needs: + - build + runs-on: ubuntu-latest + strategy: + matrix: + ansible: [2.9.27, 2.10.17, 2.11.8, 2.12.2] + steps: + - name: Set up Python 3.8 + uses: actions/setup-python@v1 + with: + python-version: 3.8 + + - name: Install ansible-base (v${{ matrix.ansible }}) + run: pip install https://github.com/ansible/ansible/archive/v${{ matrix.ansible }}.tar.gz --disable-pip-version-check + + - name: Download migrated collection artifacts + uses: actions/download-artifact@v1 + with: + name: collection + path: .cache/collection-tarballs + + - name: Install the collection tarball + run: ansible-galaxy collection install .cache/collection-tarballs/*.tar.gz + + - name: Run sanity tests + run: ansible-test sanity --docker --python 3.8 -v --color --truncate 0 + working-directory: /home/runner/.ansible/collections/ansible_collections/cisco/dcnm + + unit-tests: name: Run DCNM Unit Tests needs: @@ -50,12 +82,12 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - ansible: [2.9.12, 2.10.0] + ansible: [2.9.27, 2.10.17, 2.11.8, 2.12.2] steps: - - name: Set up Python 3.7 + - name: Set up Python 3.8 uses: actions/setup-python@v1 with: - python-version: 3.7 + python-version: 3.8 - name: Install ansible-base (v${{ matrix.ansible }}) run: pip install https://github.com/ansible/ansible/archive/v${{ matrix.ansible }}.tar.gz --disable-pip-version-check @@ -77,7 +109,6 @@ jobs: - name: Run DCNM Unit tests run: coverage run --source=. -m pytest tests/unit/modules/dcnm/. 
-vvvv - #run: ansible-test units --docker -v --color --truncate 0 --coverage working-directory: /home/runner/.ansible/collections/ansible_collections/cisco/dcnm env: PYTHONPATH: /home/runner/.ansible/collections @@ -85,4 +116,3 @@ jobs: - name: Generate coverage report run: coverage report working-directory: /home/runner/.ansible/collections/ansible_collections/cisco/dcnm - diff --git a/README.md b/README.md index 9ea25d37f..3aa0becd2 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ This collection is intended for use with the following release versions: ## Ansible version compatibility -This collection has been tested against following Ansible versions: **>=2.9.10,<2.12**. +This collection has been tested against following Ansible versions: **>=2.9.10,<2.13**. Plugins and modules within a collection may be tested with only specific Ansible versions. A collection may contain metadata that identifies these versions. diff --git a/meta/runtime.yml b/meta/runtime.yml index 285bed7c7..69dfbd085 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -3,7 +3,7 @@ # will redirect to a common action plugin called cisco.dcnm.dcnm # #--- -requires_ansible: '>=2.9.10,<2.12' +requires_ansible: '>=2.9.10,<2.13' #plugin_routing: # action: # dcnm_inventory: diff --git a/plugins/action/dcnm_interface.py b/plugins/action/dcnm_interface.py index deb1c845c..09145428f 100644 --- a/plugins/action/dcnm_interface.py +++ b/plugins/action/dcnm_interface.py @@ -1,4 +1,4 @@ -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -39,7 +39,7 @@ def run(self, tmp=None, task_vars=None): if (cfg.get('switch', None) is not None): for sw in cfg['switch']: if (isinstance(sw, list)): - msg = " !!! 
Switches included in playbook profiles must be individual items, but given switch element = {} is a list ".format(sw) + msg = " !!! Switches included in playbook profiles must be individual items, but given switch element = {0} is a list ".format(sw) warnings.append(msg) flattened = True flat_sw_list.extend(sw) @@ -51,7 +51,7 @@ def run(self, tmp=None, task_vars=None): for k in keys: if (('profile' in k) and (k != 'profile')): - msg = " !!! Profile name included in playbook tasks must be 'profile', but given profile name = '{}' ".format(k) + msg = " !!! Profile name included in playbook tasks must be 'profile', but given profile name = '{0}' ".format(k) warnings.append(msg) pop_key = k diff --git a/plugins/action/dcnm_inventory.py b/plugins/action/dcnm_inventory.py index 0902082b7..a6c2e0ac5 100644 --- a/plugins/action/dcnm_inventory.py +++ b/plugins/action/dcnm_inventory.py @@ -1,4 +1,4 @@ -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -47,7 +47,7 @@ def run(self, tmp=None, task_vars=None): msg = ( "PERSISTENT_COMMAND_TIMEOUT and PERSISTENT_CONNECT_TIMEOUT" ) - msg += " must be set to {} seconds or higher when using dcnm_inventory module.".format(timeout) + msg += " must be set to {0} seconds or higher when using dcnm_inventory module.".format(timeout) msg += " Current persistent_command_timeout setting:" + str( persistent_command_timeout ) diff --git a/plugins/action/dcnm_vrf.py b/plugins/action/dcnm_vrf.py index 5c84e505b..00bb9695f 100644 --- a/plugins/action/dcnm_vrf.py +++ b/plugins/action/dcnm_vrf.py @@ -1,4 +1,4 @@ -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -27,13 +27,16 @@ class ActionModule(ActionNetworkModule): def run(self, tmp=None, task_vars=None): - if self._task.args.get('state') == 'merged' or self._task.args.get('state') == 'overridden' \ - or self._task.args.get('state') == 'replaced': - for con in self._task.args['config']: - if 'attach' in con: - for at in con['attach']: - if 'vlan_id' in at: - msg = ("Playbook parameter vlan_id should not be specified under the attach: block. Please specify this under the config: block instead") + if ( + self._task.args.get("state") == "merged" + or self._task.args.get("state") == "overridden" + or self._task.args.get("state") == "replaced" + ): + for con in self._task.args["config"]: + if "attach" in con: + for at in con["attach"]: + if "vlan_id" in at: + msg = "Playbook parameter vlan_id should not be specified under the attach: block. Please specify this under the config: block instead" # noqa return {"failed": True, "msg": msg} self.result = super(ActionModule, self).run(task_vars=task_vars) diff --git a/plugins/httpapi/dcnm.py b/plugins/httpapi/dcnm.py index 626beea35..927ec52da 100644 --- a/plugins/httpapi/dcnm.py +++ b/plugins/httpapi/dcnm.py @@ -1,6 +1,4 @@ -#!/usr/bin/python -# -# Copyright (c) 2020-2021 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,7 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from __future__ import (absolute_import, division, print_function) +from __future__ import absolute_import, division, print_function + __metaclass__ = type DOCUMENTATION = """ @@ -38,15 +37,10 @@ class HttpApi(HttpApiBase): - def __init__(self, *args, **kwargs): super(HttpApi, self).__init__(*args, **kwargs) - self.headers = { - 'Content-Type': "application/json" - } - self.txt_headers = { - 'Content-Type': "text/plain" - } + self.headers = {"Content-Type": "application/json"} + self.txt_headers = {"Content-Type": "text/plain"} self.version = None def get_version(self): @@ -56,88 +50,133 @@ def set_version(self, version): self.version = version def _login_old(self, username, password, method, path): - ''' DCNM Helper Function to login to DCNM version 11. - ''' + """DCNM Helper Function to login to DCNM version 11.""" # Ansible expresses the persistent_connect_timeout in seconds. # This value needs to be converted to milliseconds for DCNM timeout = self.connection.get_option("persistent_connect_timeout") * 1000 data = "{'expirationTime': %s}" % timeout try: - response, response_data = self.connection.send(path, data, method=method, headers=self.headers, force_basic_auth=True) + response, response_data = self.connection.send( + path, data, method=method, headers=self.headers, force_basic_auth=True + ) vrd = self._verify_response(response, method, path, response_data) - if vrd['RETURN_CODE'] != 200: - self.login_fail_msg.append('Error on attempt to connect and authenticate with DCNM controller: {}'.format(vrd)) + if vrd["RETURN_CODE"] != 200: + self.login_fail_msg.append( + "Error on attempt to connect and authenticate with DCNM controller: {0}".format( + vrd + ) + ) return response_value = self._get_response_value(response_data) - self.connection._auth = {'Dcnm-Token': self._response_to_json(response_value)['Dcnm-Token']} + self.connection._auth = { + "Dcnm-Token": self._response_to_json(response_value)["Dcnm-Token"] + } self.login_succeeded = True 
self.set_version(11) except Exception as e: - self.login_fail_msg.append('Error on attempt to connect and authenticate with DCNM controller: {}'.format(e)) + self.login_fail_msg.append( + "Error on attempt to connect and authenticate with DCNM controller: {0}".format( + e + ) + ) def _login_latestv1(self, username, password, method, path): - ''' Nexus Dashboard NDFC Helper Function to login to NDFC version 12 or later. - ''' - login_domain = 'DefaultAuth' + """Nexus Dashboard NDFC Helper Function to login to NDFC version 12 or later.""" + login_domain = "DefaultAuth" # login_domain = 'local' - payload = {'username': self.connection.get_option('remote_user'), 'password': self.connection.get_option('password'), 'domain': login_domain} + payload = { + "username": self.connection.get_option("remote_user"), + "password": self.connection.get_option("password"), + "domain": login_domain, + } data = json.dumps(payload) try: - response, response_data = self.connection.send(path, data, method=method, headers=self.headers) + response, response_data = self.connection.send( + path, data, method=method, headers=self.headers + ) vrd = self._verify_response(response, method, path, response_data) - if vrd['RETURN_CODE'] != 200: - self.login_fail_msg.append('Error on attempt to connect and authenticate with NDFC controller: {}'.format(vrd)) + if vrd["RETURN_CODE"] != 200: + self.login_fail_msg.append( + "Error on attempt to connect and authenticate with NDFC controller: {0}".format( + vrd + ) + ) return - self.connection._auth = {'Authorization': 'Bearer {0}'.format(self._response_to_json12(response_data).get('token'))} + self.connection._auth = { + "Authorization": "Bearer {0}".format( + self._response_to_json12(response_data).get("token") + ) + } self.login_succeeded = True self.set_version(12) except Exception as e: - self.login_fail_msg.append('Error on attempt to connect and authenticate with NDFC controller: {}'.format(e)) + self.login_fail_msg.append( + "Error on attempt to 
connect and authenticate with NDFC controller: {0}".format( + e + ) + ) def _login_latestv2(self, username, password, method, path): - ''' Nexus Dashboard NDFC Helper Function to login to NDFC version 12 or later. - ''' - login_domain = 'DefaultAuth' + """Nexus Dashboard NDFC Helper Function to login to NDFC version 12 or later.""" + login_domain = "DefaultAuth" # login_domain = 'local' - payload = {'userName': self.connection.get_option('remote_user'), 'userPasswd': self.connection.get_option('password'), 'domain': login_domain} + payload = { + "userName": self.connection.get_option("remote_user"), + "userPasswd": self.connection.get_option("password"), + "domain": login_domain, + } data = json.dumps(payload) try: - response, response_data = self.connection.send(path, data, method=method, headers=self.headers) + response, response_data = self.connection.send( + path, data, method=method, headers=self.headers + ) vrd = self._verify_response(response, method, path, response_data) - if vrd['RETURN_CODE'] != 200: - self.login_fail_msg.append('Error on attempt to connect and authenticate with NDFC controller: {}'.format(vrd)) + if vrd["RETURN_CODE"] != 200: + self.login_fail_msg.append( + "Error on attempt to connect and authenticate with NDFC controller: {0}".format( + vrd + ) + ) return - self.connection._auth = {'Authorization': 'Bearer {0}'.format(self._response_to_json12(response_data).get('token'))} + self.connection._auth = { + "Authorization": "Bearer {0}".format( + self._response_to_json12(response_data).get("token") + ) + } self.login_succeeded = True self.set_version(12) except Exception as e: - self.login_fail_msg.append('Error on attempt to connect and authenticate with NDFC controller: {}'.format(e)) + self.login_fail_msg.append( + "Error on attempt to connect and authenticate with NDFC controller: {0}".format( + e + ) + ) def login(self, username, password): - ''' DCNM/NDFC Login Method. 
This method is automatically called by the - Ansible plugin architecture if an active Token is not already - available. - ''' + """DCNM/NDFC Login Method. This method is automatically called by the + Ansible plugin architecture if an active Token is not already + available. + """ self.login_succeeded = False self.login_fail_msg = [] - method = 'POST' - path = {'dcnm': '/rest/logon', 'ndfc': '/login'} + method = "POST" + path = {"dcnm": "/rest/logon", "ndfc": "/login"} login12Func = [self._login_latestv2, self._login_latestv1] # Attempt to login to DCNM version 11 - self._login_old(username, password, method, path['dcnm']) + self._login_old(username, password, method, path["dcnm"]) # If login attempt failed then try NDFC version 12 if not self.login_succeeded: for func in login12Func: - func(username, password, method, path['ndfc']) + func(username, password, method, path["ndfc"]) if self.login_succeeded: break @@ -147,29 +186,45 @@ def login(self, username, password): def _logout_old(self, method, path): try: - response, response_data = self.connection.send(path, self.connection._auth['Dcnm-Token'], method=method, headers=self.headers, force_basic_auth=True) + response, response_data = self.connection.send( + path, + self.connection._auth["Dcnm-Token"], + method=method, + headers=self.headers, + force_basic_auth=True, + ) vrd = self._verify_response(response, method, path, response_data) - if vrd['RETURN_CODE'] != 200: - self.logout_fail_msg.append('Error on attempt to logout from DCNM controller: {}'.format(vrd)) + if vrd["RETURN_CODE"] != 200: + self.logout_fail_msg.append( + "Error on attempt to logout from DCNM controller: {0}".format(vrd) + ) return self.logout_succeeded = True except Exception as e: - self.logout_fail_msg.append('Error on attempt to logout from DCNM controller: {}'.format(e)) + self.logout_fail_msg.append( + "Error on attempt to logout from DCNM controller: {0}".format(e) + ) def _logout_latest(self, method, path): try: - response, 
response_data = self.connection.send(path, {}, method=method, headers=self.headers) + response, response_data = self.connection.send( + path, {}, method=method, headers=self.headers + ) vrd = self._verify_response(response, method, path, response_data) - if vrd['RETURN_CODE'] != 200: - self.logout_fail_msg.append('Error on attempt to logout from NDFC controller: {}'.format(vrd)) + if vrd["RETURN_CODE"] != 200: + self.logout_fail_msg.append( + "Error on attempt to logout from NDFC controller: {0}".format(vrd) + ) return self.logout_succeeded = True except Exception as e: - self.logout_fail_msg.append('Error on attempt to logout from NDFC controller: {}'.format(e)) + self.logout_fail_msg.append( + "Error on attempt to logout from NDFC controller: {0}".format(e) + ) def logout(self): if self.connection._auth is None: @@ -177,15 +232,15 @@ def logout(self): self.logout_succeeded = False self.logout_fail_msg = [] - method = 'POST' - path = {'dcnm': '/rest/logout', 'ndfc': '/logout'} + method = "POST" + path = {"dcnm": "/rest/logout", "ndfc": "/logout"} if self.version == 11: # Logout of DCNM version 11 - self._logout_old(method, path['dcnm']) + self._logout_old(method, path["dcnm"]) elif self.version >= 12: # Logout of DCNM version 12 - self._logout_latest(method, path['ndfc']) + self._logout_latest(method, path["ndfc"]) # If both login attemps fail, raise ConnectionError if not self.logout_succeeded: @@ -200,14 +255,16 @@ def check_url_connection(self): except requests.exceptions.RequestException as e: msg = """ - Please verify that the DCNM controller HTTPS URL ({}) is + Please verify that the DCNM controller HTTPS URL ({0}) is reachable from the Ansible controller and try again - """.format(self.connection._url) + """.format( + self.connection._url + ) raise ConnectionError(str(e) + msg) def send_request(self, method, path, json=None): - ''' This method handles all DCNM REST API requests other then login ''' + """This method handles all DCNM REST API requests other 
then login""" if json is None: json = {} @@ -217,42 +274,48 @@ def send_request(self, method, path, json=None): try: # Perform some very basic path input validation. path = str(path) - if path[0] != '/': - msg = 'Value of does not appear to be formated properly' + if path[0] != "/": + msg = "Value of does not appear to be formated properly" raise ConnectionError(self._return_info(None, method, path, msg)) - response, rdata = self.connection.send(path, json, method=method, headers=self.headers, force_basic_auth=True) + response, rdata = self.connection.send( + path, json, method=method, headers=self.headers, force_basic_auth=True + ) return self._verify_response(response, method, path, rdata) except Exception as e: eargs = e.args[0] - if isinstance(eargs, dict) and eargs.get('METHOD'): + if isinstance(eargs, dict) and eargs.get("METHOD"): return eargs raise ConnectionError(str(e)) def send_txt_request(self, method, path, txt=None): - ''' This method handles all DCNM REST API requests other then login ''' + """This method handles all DCNM REST API requests other then login""" if txt is None: - txt = '' + txt = "" self.check_url_connection() try: # Perform some very basic path input validation. 
path = str(path) - if path[0] != '/': - msg = 'Value of does not appear to be formated properly' + if path[0] != "/": + msg = "Value of does not appear to be formated properly" raise ConnectionError(self._return_info(None, method, path, msg)) - response, rdata = self.connection.send(path, txt, method=method, - headers=self.txt_headers, - force_basic_auth=True) + response, rdata = self.connection.send( + path, + txt, + method=method, + headers=self.txt_headers, + force_basic_auth=True, + ) return self._verify_response(response, method, path, rdata) except Exception as e: eargs = e.args[0] - if isinstance(eargs, dict) and eargs.get('METHOD'): + if isinstance(eargs, dict) and eargs.get("METHOD"): return eargs raise ConnectionError(str(e)) def _verify_response(self, response, method, path, rdata): - ''' Process the return code and response object from DCNM ''' + """Process the return code and response object from DCNM""" rv = self._get_response_value(rdata) jrd = self._response_to_json(rv) @@ -265,23 +328,23 @@ def _verify_response(self, response, method, path, rdata): if rc >= 200 and rc <= 600: return self._return_info(rc, method, path, msg, jrd) else: - msg = 'Unknown RETURN_CODE: {}'.format(rc) + msg = "Unknown RETURN_CODE: {0}".format(rc) raise ConnectionError(self._return_info(rc, method, path, msg, jrd)) def _get_response_value(self, response_data): - ''' Extract string data from response_data returned from DCNM ''' + """Extract string data from response_data returned from DCNM""" return to_text(response_data.getvalue()) def _response_to_json(self, response_text): - ''' Convert response_text to json format ''' + """Convert response_text to json format""" try: return json.loads(response_text) if response_text else {} # JSONDecodeError only available on Python 3.5+ except ValueError: - return 'Invalid JSON response: {}'.format(response_text) + return "Invalid JSON response: {0}".format(response_text) def _response_to_json12(self, response_text): - ''' Convert 
response_text to json format ''' + """Convert response_text to json format""" try: response_value = response_text.getvalue() @@ -293,16 +356,16 @@ def _response_to_json12(self, response_text): return json.loads(response_text) if response_text else {} # # JSONDecodeError only available on Python 3.5+ except ValueError: - return 'Invalid JSON response: {}'.format(response_text) + return "Invalid JSON response: {0}".format(response_text) def _return_info(self, rc, method, path, msg, json_respond_data=None): - ''' Format success/error data and return with consistent format ''' + """Format success/error data and return with consistent format""" info = {} - info['RETURN_CODE'] = rc - info['METHOD'] = method - info['REQUEST_PATH'] = path - info['MESSAGE'] = msg - info['DATA'] = json_respond_data + info["RETURN_CODE"] = rc + info["METHOD"] = method + info["REQUEST_PATH"] = path + info["MESSAGE"] = msg + info["DATA"] = json_respond_data return info diff --git a/plugins/module_utils/network/dcnm/dcnm.py b/plugins/module_utils/network/dcnm/dcnm.py index 892994723..a218b436f 100644 --- a/plugins/module_utils/network/dcnm/dcnm.py +++ b/plugins/module_utils/network/dcnm/dcnm.py @@ -1,6 +1,4 @@ -#!/usr/bin/python -# -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,6 +11,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import absolute_import, division, print_function + +__metaclass__ = type import socket import json @@ -25,32 +26,38 @@ def validate_ip_address_format(type, item, invalid_params): - if ((type == 'ipv4_subnet') or (type == 'ipv4')): - addr_type = 'IPv4' + if (type == "ipv4_subnet") or (type == "ipv4"): + addr_type = "IPv4" addr_family = socket.AF_INET mask_len = 32 - if ((type == 'ipv6_subnet') or (type == 'ipv6')): - addr_type = 'IPv6' + if (type == "ipv6_subnet") or (type == "ipv6"): + addr_type = "IPv6" addr_family = socket.AF_INET6 mask_len = 128 - if (item.strip() != ''): - address = item.split('/')[0] - if ('subnet' in type): - if '/' in item: - subnet = item.split('/')[1] + if item.strip() != "": + address = item.split("/")[0] + if "subnet" in type: + if "/" in item: + subnet = item.split("/")[1] if not subnet or int(subnet) > mask_len: - invalid_params.append('{} : Invalid {} gw/subnet syntax'.format(item, addr_type)) + invalid_params.append( + "{0} : Invalid {1} gw/subnet syntax".format(item, addr_type) + ) else: - invalid_params.append('{} : Invalid {} gw/subnet syntax'.format(item, addr_type)) + invalid_params.append( + "{0} : Invalid {1} gw/subnet syntax".format(item, addr_type) + ) try: socket.inet_pton(addr_family, address) except socket.error: - invalid_params.append('{} : Invalid {} address syntax'.format(item, addr_type)) + invalid_params.append( + "{0} : Invalid {1} address syntax".format(item, addr_type) + ) def validate_list_of_dicts(param_list, spec, module=None): - """ Validate/Normalize playbook params. Will raise when invalid parameters found. + """Validate/Normalize playbook params. Will raise when invalid parameters found. param_list: a playbook parameter list of dicts spec: an argument spec dict e.g. 
spec = dict(ip=dict(required=True, type='ipv4'), @@ -65,56 +72,72 @@ def validate_list_of_dicts(param_list, spec, module=None): for param in spec: item = list_entry.get(param) if item is None: - if spec[param].get('required'): - invalid_params.append('{} : Required parameter not found'.format(param)) + if spec[param].get("required"): + invalid_params.append( + "{0} : Required parameter not found".format(param) + ) else: - item = spec[param].get('default') + item = spec[param].get("default") else: - type = spec[param].get('type') - if type == 'str': + type = spec[param].get("type") + if type == "str": item = v.check_type_str(item) - if spec[param].get('length_max'): - if 1 <= len(item) <= spec[param].get('length_max'): + if spec[param].get("length_max"): + if 1 <= len(item) <= spec[param].get("length_max"): pass - elif param == "vrf_name" and (len(item) <= spec[param].get('length_max')): + elif param == "vrf_name" and ( + len(item) <= spec[param].get("length_max") + ): pass else: - invalid_params.append('{}:{} : The string exceeds the allowed ' - 'range of max {} char'.format(param, item, - spec[param].get('length_max'))) - elif type == 'int': + invalid_params.append( + "{0}:{1} : The string exceeds the allowed " + "range of max {2} char".format( + param, item, spec[param].get("length_max") + ) + ) + elif type == "int": item = v.check_type_int(item) min_value = 1 - if spec[param].get('range_min') is not None: - min_value = spec[param].get('range_min') - if spec[param].get('range_max'): - if min_value <= item <= spec[param].get('range_max'): + if spec[param].get("range_min") is not None: + min_value = spec[param].get("range_min") + if spec[param].get("range_max"): + if min_value <= item <= spec[param].get("range_max"): pass else: - invalid_params.append('{}:{} : The item exceeds the allowed ' - 'range of max {}'.format(param, item, - spec[param].get('range_max'))) - elif type == 'bool': + invalid_params.append( + "{0}:{1} : The item exceeds the allowed " + "range of 
max {2}".format( + param, item, spec[param].get("range_max") + ) + ) + elif type == "bool": item = v.check_type_bool(item) - elif type == 'list': + elif type == "list": item = v.check_type_list(item) - elif type == 'dict': + elif type == "dict": item = v.check_type_dict(item) - elif ((type == 'ipv4_subnet') or (type == 'ipv4') - or (type == 'ipv6_subnet') or (type == 'ipv6')): + elif ( + (type == "ipv4_subnet") + or (type == "ipv4") + or (type == "ipv6_subnet") + or (type == "ipv6") + ): validate_ip_address_format(type, item, invalid_params) - choice = spec[param].get('choices') + choice = spec[param].get("choices") if choice: if item not in choice: - invalid_params.append('{} : Invalid choice provided'.format(item)) + invalid_params.append( + "{0} : Invalid choice provided".format(item) + ) - no_log = spec[param].get('no_log') + no_log = spec[param].get("no_log") if no_log: if module is not None: module.no_log_values.add(item) else: - msg = "\n\n'{}' is a no_log parameter".format(param) + msg = "\n\n'{0}' is a no_log parameter".format(param) msg += "\nAnsible module object must be passed to this " msg += "\nfunction to ensure it is not logged\n\n" raise Exception(msg) @@ -129,43 +152,43 @@ def get_fabric_inventory_details(module, fabric): inventory_data = {} rc = False - method = 'GET' - path = '/rest/control/fabrics/{}/inventory'.format(fabric) + method = "GET" + path = "/rest/control/fabrics/{0}/inventory".format(fabric) conn = Connection(module._socket_path) if conn.get_version() == 12: path = "/appcenter/cisco/ndfc/v1/lan-fabric" + path count = 1 - while (rc is False): + while rc is False: response = dcnm_send(module, method, path) - if not response.get('RETURN_CODE'): + if not response.get("RETURN_CODE"): rc = True module.fail_json(msg=response) - if response.get('RETURN_CODE') == 404: + if response.get("RETURN_CODE") == 404: # RC 404 - Object not found rc = True return inventory_data - if response.get('RETURN_CODE') == 401: + if response.get("RETURN_CODE") 
== 401: # RC 401: Server not reachable. Retry a few times - if (count <= 20): + if count <= 20: count = count + 1 rc = False time.sleep(0.1) continue - else: - raise Exception(response) - elif response.get('RETURN_CODE') >= 400: + + raise Exception(response) + elif response.get("RETURN_CODE") >= 400: # Handle additional return codes as needed but for now raise # for any error other then 404. raise Exception(response) - for device_data in response.get('DATA'): - key = device_data.get('ipAddress') + for device_data in response.get("DATA"): + key = device_data.get("ipAddress") inventory_data[key] = device_data rc = True @@ -178,9 +201,9 @@ def get_ip_sn_dict(inventory_data): hn_sn = {} for device_key in inventory_data.keys(): - ip = inventory_data[device_key].get('ipAddress') - sn = inventory_data[device_key].get('serialNumber') - hn = inventory_data[device_key].get('logicalName') + ip = inventory_data[device_key].get("ipAddress") + sn = inventory_data[device_key].get("serialNumber") + hn = inventory_data[device_key].get("logicalName") ip_sn.update({ip: sn}) hn_sn.update({hn: sn}) @@ -206,9 +229,9 @@ def get_ip_sn_fabric_dict(inventory_data): sn_fab = {} for device_key in inventory_data.keys(): - ip = inventory_data[device_key].get('ipAddress') - sn = inventory_data[device_key].get('serialNumber') - fabric_name = inventory_data[device_key].get('fabricName') + ip = inventory_data[device_key].get("ipAddress") + sn = inventory_data[device_key].get("serialNumber") + fabric_name = inventory_data[device_key].get("fabricName") ip_fab.update({ip: fabric_name}) sn_fab.update({sn: fabric_name}) @@ -222,9 +245,11 @@ def get_ip_sn_fabric_dict(inventory_data): # returned def dcnm_get_ip_addr_info(module, sw_elem, ip_sn, hn_sn): - msg_dict = {'Error': ''} + msg_dict = {"Error": ""} msg = 'Given switch elem = "{}" is not a valid one for this fabric\n' - msg1 = 'Given switch elem = "{}" cannot be validated, provide a valid ip_sn object\n' + msg1 = ( + 'Given switch elem = "{}" cannot 
be validated, provide a valid ip_sn object\n' + ) # Check if the given sw_elem is a v4 ip_addr try: @@ -238,47 +263,47 @@ def dcnm_get_ip_addr_info(module, sw_elem, ip_sn, hn_sn): except socket.error: # Not legal ip_addr = [] - if (ip_addr == []): + if ip_addr == []: # Given element is not an IP address. Try DNS or # hostname try: addr_info = socket.getaddrinfo(sw_elem, 0, socket.AF_INET, 0, 0, 0) - if (None is ip_sn): + if None is ip_sn: return addr_info[0][4][0] if addr_info: - if (addr_info[0][4][0] in ip_sn.keys()): + if addr_info[0][4][0] in ip_sn.keys(): return addr_info[0][4][0] else: - msg_dict['Error'] = msg.format(sw_elem) + msg_dict["Error"] = msg.format(sw_elem) raise module.fail_json(msg=json.dumps(msg_dict)) except socket.gaierror: - if (None is ip_sn): - msg_dict['Error'] = msg1.format(sw_elem) + if None is ip_sn: + msg_dict["Error"] = msg1.format(sw_elem) raise module.fail_json(msg=json.dumps(msg_dict)) # This means that the given element is neither an IP # address nor a DNS name. # First look up hn_sn. Get the serial number and look up ip_sn to # get the IP address. sno = None - if (None is not hn_sn): + if None is not hn_sn: sno = hn_sn.get(sw_elem, None) - if (sno is not None): + if sno is not None: ip_addr = [k for k, v in ip_sn.items() if v == sno] else: ip_addr = [k for k, v in ip_sn.items() if v == sw_elem] - if (ip_addr): + if ip_addr: return ip_addr[0] else: - msg_dict['Error'] = msg.format(sw_elem) + msg_dict["Error"] = msg.format(sw_elem) raise module.fail_json(msg=json.dumps(msg_dict)) else: # Given sw_elem is an ip_addr. 
check if this is valid - if (None is ip_sn): + if None is ip_sn: return ip_addr - if (ip_addr in ip_sn.keys()): + if ip_addr in ip_sn.keys(): return ip_addr else: - msg_dict['Error'] = msg.format(sw_elem) + msg_dict["Error"] = msg.format(sw_elem) raise module.fail_json(msg=json.dumps(msg_dict)) @@ -296,54 +321,54 @@ def get_fabric_details(module, fabric): """ fabric_data = {} rc = False - method = 'GET' - path = '/rest/control/fabrics/{}'.format(fabric) + method = "GET" + path = "/rest/control/fabrics/{0}".format(fabric) conn = Connection(module._socket_path) if conn.get_version() == 12: path = "/appcenter/cisco/ndfc/v1/lan-fabric" + path count = 1 - while (rc is False): + while rc is False: response = dcnm_send(module, method, path) - if not response.get('RETURN_CODE'): + if not response.get("RETURN_CODE"): rc = True module.fail_json(msg=response) - if response.get('RETURN_CODE') == 404: + if response.get("RETURN_CODE") == 404: # RC 404 - Object not found rc = True return fabric_data - if response.get('RETURN_CODE') == 401: + if response.get("RETURN_CODE") == 401: # RC 401: Server not reachable. Retry a few times - if (count <= 20): + if count <= 20: count = count + 1 rc = False time.sleep(0.1) continue - else: - raise Exception(response) - elif response.get('RETURN_CODE') >= 400: + + raise Exception(response) + elif response.get("RETURN_CODE") >= 400: # Handle additional return codes as needed but for now raise # for any error other then 404. 
raise Exception(response) - fabric_data = response.get('DATA') + fabric_data = response.get("DATA") rc = True return fabric_data -def dcnm_send(module, method, path, data=None, data_type='json'): +def dcnm_send(module, method, path, data=None, data_type="json"): conn = Connection(module._socket_path) - if (data_type == 'json'): + if data_type == "json": return conn.send_request(method, path, data) - elif (data_type == 'text'): + elif data_type == "text": return conn.send_txt_request(method, path, data) @@ -366,15 +391,15 @@ def dcnm_version_supported(module): int: Major software version for DCNM/NDFC """ - method = 'GET' + method = "GET" supported = None data = None paths = ["/fm/fmrest/about/version", "/appcenter/cisco/ndfc/api/about/version"] for path in paths: response = dcnm_send(module, method, path) - if response['RETURN_CODE'] == 200: - data = response.get('DATA') + if response["RETURN_CODE"] == 200: + data = response.get("DATA") break if data: @@ -382,14 +407,14 @@ def dcnm_version_supported(module): # Examples: # 11.5(1), 12.0.1a' # For these examples 11 or 12 would be returned - raw_version = data['version'] + raw_version = data["version"] regex = r"^(\d+)\.\d+" mo = re.search(regex, raw_version) if mo: supported = int(mo.group(1)) if supported is None: - msg = 'Unable to determine the DCNM/NDFC Software Version' + msg = "Unable to determine the DCNM/NDFC Software Version" module.fail_json(msg=msg) return supported @@ -397,9 +422,9 @@ def dcnm_version_supported(module): def parse_response(response): - if response.get('ERROR') == 'Not Found' and response['RETURN_CODE'] == 404: + if response.get("ERROR") == "Not Found" and response["RETURN_CODE"] == 404: return True, False - if response['RETURN_CODE'] != 200 or response['MESSAGE'] != 'OK': + if response["RETURN_CODE"] != 200 or response["MESSAGE"] != "OK": return False, True return False, False @@ -422,7 +447,7 @@ def dcnm_get_url(module, fabric, path, items, module_name): dict: Response DATA from DCNM/NDFC 
""" - method = 'GET' + method = "GET" send_count = 1 # NDFC/DCNM12 can handle urls upto 6144 characters. @@ -431,21 +456,27 @@ def dcnm_get_url(module, fabric, path, items, module_name): # for query path(url) if sys.getsizeof(items) > 5900: if (sys.getsizeof(items) % 5900) == 0: - send_count = sys.getsizeof(items)/5900 + send_count = sys.getsizeof(items) / 5900 else: - send_count = sys.getsizeof(items)//5900 + 1 + send_count = sys.getsizeof(items) // 5900 + 1 - itemlist = items.split(',') + itemlist = items.split(",") iter = 0 while iter < send_count: if send_count == 1: url = path.format(fabric, items) - elif (iter != (send_count - 1)): - itemstr = ','.join(itemlist[(iter*(len(itemlist)//send_count)):((iter+1)*(len(itemlist)//send_count))]) + elif iter != (send_count - 1): + itemstr = ",".join( + itemlist[ + (iter * (len(itemlist) // send_count)): ( + (iter + 1) * (len(itemlist) // send_count) + ) + ] + ) url = path.format(fabric, itemstr) else: - itemstr = ','.join(itemlist[iter*(len(itemlist)//send_count):]) + itemstr = ",".join(itemlist[iter * (len(itemlist) // send_count):]) url = path.format(fabric, itemstr) att_objects = dcnm_send(module, method, url) @@ -454,8 +485,9 @@ def dcnm_get_url(module, fabric, path, items, module_name): if missing_fabric or not_ok: msg1 = "Fabric {0} not present on DCNM".format(fabric) - msg2 = "Unable to find " \ - "{0}: {1} under fabric: {2}".format(module_name, items[:-1], fabric) + msg2 = "Unable to find " "{0}: {1} under fabric: {2}".format( + module_name, items[:-1], fabric + ) module.fail_json(msg=msg1 if missing_fabric else msg2) return @@ -463,7 +495,7 @@ def dcnm_get_url(module, fabric, path, items, module_name): if iter == 0: attach_objects = att_objects else: - attach_objects['DATA'].extend(att_objects['DATA']) + attach_objects["DATA"].extend(att_objects["DATA"]) iter += 1 diff --git a/plugins/modules/dcnm_interface.py b/plugins/modules/dcnm_interface.py index e11c4a425..9cede625f 100644 --- 
a/plugins/modules/dcnm_interface.py +++ b/plugins/modules/dcnm_interface.py @@ -1,6 +1,6 @@ #!/usr/bin/python # -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import absolute_import, division, print_function +__metaclass__ = type __author__ = "Mallik Mudigonda" -DOCUMENTATION = ''' +DOCUMENTATION = """ --- module: dcnm_interface short_description: DCNM Ansible Module for managing interfaces. @@ -104,8 +106,7 @@ required: true access_vlan: description: - - Vlan for the interface. This option is applicable only for interfaces whose 'mode' - is 'access' + - Vlan for the interface. This option is applicable only for interfaces whose 'mode' is 'access' type: str default: "" int_vrf: @@ -122,7 +123,7 @@ description: - IPV4 address mask length. This object is applicable only if the 'mode' is 'l3' type: int - choices : [Min:1, Max:31] + choices: [Min 1, Max 31] default: 8 route_tag: description: @@ -162,14 +163,14 @@ - Port channel identifier of first peer. If this object is not included, then the value defaults to the vPC identifier. This value cannot be changed once vPC is created type: int - choices: [Min:1, Max:4096] + choices: [Min 1, Max 4096] default: Default value is the vPC port identifier peer2_pcid: description: - Port channel identifier of second peer. If this object is not included, then the value defaults to the vPC identifier. 
This value cannot be changed once vPC is created type: int - choices: [Min:1, Max:4096] + choices: [Min 1, Max 4096] default: Default value is the vPC port identifier peer1_members: description: @@ -285,7 +286,7 @@ description: - IPV4 address mask length. type: int - choices : [Min:8, Max:31] + choices : [Min 8, Max 31] default: 8 ipv6_addr: description: @@ -296,19 +297,19 @@ description: - IPV6 address mask length. type: int - choices : [Min:1, Max:31] + choices : [Min 1, Max 31] default: 8 mtu: description: - Interface MTU type: int - choices: [Min: 576, Max: 9216] + choices: [Min 576, Max 9216] default: 9216 vlan: description: - DOT1Q vlan id for this interface type: int - choices: [Min: 2, Max: 3967] + choices: [Min 2, Max 3967] default: 0 cmds: description: @@ -438,7 +439,7 @@ - IPV4 address mask length. This object is applicable only if the 'mode' is 'routed' or 'epl_routed' type: int - choices : [Min:1, Max:31] + choices : [Min 1, Max 31] default: 8 ipv6_addr: description: @@ -449,7 +450,7 @@ description: - IPV6 address mask length. 
This object is applicable only if the 'mode' is 'epl_routed' type: int - choices : [Min:1, Max:31] + choices : [Min 1, Max 31] default: 8 route_tag: description: @@ -472,9 +473,9 @@ - Administrative state of the interface type: bool default: true -''' +""" -EXAMPLES = ''' +EXAMPLES = """ # States: # This module supports the following states: @@ -983,7 +984,7 @@ switch: - "192.172.1.1" # provide the switch information where the config is to be deployed -''' +""" import time import json @@ -992,40 +993,46 @@ import sys from ansible.module_utils.basic import AnsibleModule -from ansible_collections.cisco.dcnm.plugins.module_utils.network.dcnm.dcnm import \ - dcnm_send, get_fabric_inventory_details, dcnm_get_ip_addr_info, validate_list_of_dicts,\ - get_ip_sn_dict, dcnm_version_supported +from ansible_collections.cisco.dcnm.plugins.module_utils.network.dcnm.dcnm import ( + dcnm_send, + get_fabric_inventory_details, + dcnm_get_ip_addr_info, + validate_list_of_dicts, + get_ip_sn_dict, + dcnm_version_supported, +) LOG_ERROR = 0 LOG_DEBUG = 4 LOG_VERBOSE = 5 + class DcnmIntf: dcnm_intf_paths = { 11: { - "VPC_SNO": "/rest/interface/vpcpair_serial_number?serial_number={}", - "IF_WITH_SNO_IFNAME": "/rest/interface?serialNumber={}&ifName={}", - "IF_DETAIL_WITH_SNO": "/rest/interface/detail?serialNumber={}", - "GLOBAL_IF": "/rest/globalInterface", - "GLOBAL_IF_DEPLOY":"/rest/globalInterface/deploy", - "INTERFACE": "/rest/interface", - }, + "VPC_SNO": "/rest/interface/vpcpair_serial_number?serial_number={}", + "IF_WITH_SNO_IFNAME": "/rest/interface?serialNumber={}&ifName={}", + "IF_DETAIL_WITH_SNO": "/rest/interface/detail?serialNumber={}", + "GLOBAL_IF": "/rest/globalInterface", + "GLOBAL_IF_DEPLOY": "/rest/globalInterface/deploy", + "INTERFACE": "/rest/interface", + }, 12: { - "VPC_SNO": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/interface/vpcpair_serial_number?serial_number={}", - "IF_WITH_SNO_IFNAME": 
"/appcenter/cisco/ndfc/v1/lan-fabric/rest/interface?serialNumber={}&ifName={}", - "IF_DETAIL_WITH_SNO": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/interface/detail?serialNumber={}", - "GLOBAL_IF": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/globalInterface", - "GLOBAL_IF_DEPLOY": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/globalInterface/deploy", - "INTERFACE": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/interface", - } + "VPC_SNO": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/interface/vpcpair_serial_number?serial_number={}", + "IF_WITH_SNO_IFNAME": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/interface?serialNumber={}&ifName={}", + "IF_DETAIL_WITH_SNO": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/interface/detail?serialNumber={}", + "GLOBAL_IF": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/globalInterface", + "GLOBAL_IF_DEPLOY": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/globalInterface/deploy", + "INTERFACE": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/interface", + }, } def __init__(self, module): self.module = module self.params = module.params - self.fabric = module.params['fabric'] - self.config = copy.deepcopy(module.params.get('config')) + self.fabric = module.params["fabric"] + self.config = copy.deepcopy(module.params.get("config")) self.pb_input = [] self.check_mode = False self.intf_info = [] @@ -1042,7 +1049,17 @@ def __init__(self, module): self.log_verbosity = 0 self.fd = None self.vpc_ip_sn = {} - self.changed_dict = [{'merged': [], 'deleted': [], 'replaced': [], 'overridden': [], 'deploy': [], 'query': [], 'debugs': []}] + self.changed_dict = [ + { + "merged": [], + "deleted": [], + "replaced": [], + "overridden": [], + "deploy": [], + "query": [], + "debugs": [], + } + ] self.dcnm_version = dcnm_version_supported(self.module) @@ -1051,15 +1068,11 @@ def __init__(self, module): self.paths = self.dcnm_intf_paths[self.dcnm_version] self.dcnm_intf_facts = { - 'fabric': module.params['fabric'], - 'config': module.params['config'], + "fabric": module.params["fabric"], + 
"config": module.params["config"], } - self.result = dict( - changed=False, - diff=[], - response=[] - ) + self.result = dict(changed=False, diff=[], response=[]) # New Interfaces # To map keys from self.have to keys from config @@ -1107,35 +1120,35 @@ def __init__(self, module): # New Interfaces self.pol_types = { 11: { - "pc_monitor": "int_monitor_port_channel_11_1", - "pc_trunk": "int_port_channel_trunk_host_11_1", - "pc_access": "int_port_channel_access_host_11_1", - "pc_l3": "int_l3_port_channel", - "sub_int_subint": "int_subif_11_1", - "lo_lo": "int_loopback_11_1", - "eth_trunk": "int_trunk_host_11_1", - "eth_access": "int_access_host_11_1", - "eth_routed": "int_routed_host_11_1", - "eth_monitor": "int_monitor_ethernet_11_1", - "eth_epl_routed": "epl_routed_intf", - "vpc_trunk": "int_vpc_trunk_host_11_1", - "vpc_access": "int_vpc_access_host_11_1" - }, + "pc_monitor": "int_monitor_port_channel_11_1", + "pc_trunk": "int_port_channel_trunk_host_11_1", + "pc_access": "int_port_channel_access_host_11_1", + "pc_l3": "int_l3_port_channel", + "sub_int_subint": "int_subif_11_1", + "lo_lo": "int_loopback_11_1", + "eth_trunk": "int_trunk_host_11_1", + "eth_access": "int_access_host_11_1", + "eth_routed": "int_routed_host_11_1", + "eth_monitor": "int_monitor_ethernet_11_1", + "eth_epl_routed": "epl_routed_intf", + "vpc_trunk": "int_vpc_trunk_host_11_1", + "vpc_access": "int_vpc_access_host_11_1", + }, 12: { - "pc_monitor": "int_monitor_port_channel", - "pc_trunk": "int_port_channel_trunk_host", - "pc_access": "int_port_channel_access_host", - "pc_l3": "int_l3_port_channel", - "sub_int_subint": "int_subif", - "lo_lo": "int_loopback", - "eth_trunk": "int_trunk_host", - "eth_access": "int_access_host", - "eth_routed": "int_routed_host", - "eth_monitor": "int_monitor_ethernet", - "eth_epl_routed": "epl_routed_intf", - "vpc_trunk": "int_vpc_trunk_host", - "vpc_access": "int_vpc_access_host" - } + "pc_monitor": "int_monitor_port_channel", + "pc_trunk": 
"int_port_channel_trunk_host", + "pc_access": "int_port_channel_access_host", + "pc_l3": "int_l3_port_channel", + "sub_int_subint": "int_subif", + "lo_lo": "int_loopback", + "eth_trunk": "int_trunk_host", + "eth_access": "int_access_host", + "eth_routed": "int_routed_host", + "eth_monitor": "int_monitor_ethernet", + "eth_epl_routed": "epl_routed_intf", + "vpc_trunk": "int_vpc_trunk_host", + "vpc_access": "int_vpc_access_host", + }, } # New Interfaces @@ -1144,7 +1157,7 @@ def __init__(self, module): "vpc": "INTERFACE_VPC", "sub_int": "SUBINTERFACE", "lo": "INTERFACE_LOOPBACK", - "eth": "INTERFACE_ETHERNET" + "eth": "INTERFACE_ETHERNET", } # New Interfaces @@ -1153,8 +1166,7 @@ def __init__(self, module): "INTERFACE_VPC": 1, "INTERFACE_ETHERNET": 2, "INTERFACE_LOOPBACK": 3, - "SUBINTERFACE": 4 - + "SUBINTERFACE": 4, } def log_msg(self, msg): @@ -1169,66 +1181,70 @@ def log_msg(self, msg): # New Interfaces def dcnm_intf_get_if_name(self, name, if_type): - if ('pc' == if_type): - port_id = re.findall(r'\d+', name) + if "pc" == if_type: + port_id = re.findall(r"\d+", name) return ("Port-channel" + str(port_id[0]), port_id[0]) - if ('vpc' == if_type): - port_id = re.findall(r'\d+', name) + if "vpc" == if_type: + port_id = re.findall(r"\d+", name) return ("vPC" + str(port_id[0]), port_id[0]) - if ('sub_int' == if_type): - port_id = re.findall(r'\d+\/\d+.\d+', name) + if "sub_int" == if_type: + port_id = re.findall(r"\d+\/\d+.\d+", name) return ("Ethernet" + str(port_id[0]), port_id[0]) - if ('lo' == if_type): - port_id = re.findall(r'\d+', name) + if "lo" == if_type: + port_id = re.findall(r"\d+", name) return ("Loopback" + str(port_id[0]), port_id[0]) - if ('eth' == if_type): - port_id = re.findall(r'\d+\/\d+', name) + if "eth" == if_type: + port_id = re.findall(r"\d+\/\d+", name) return ("Ethernet" + str(port_id[0]), port_id[0]) def dcnm_intf_get_vpc_serial_number(self, sw): path = self.paths["VPC_SNO"].format(self.ip_sn[sw]) - resp = dcnm_send(self.module, 'GET', 
path) + resp = dcnm_send(self.module, "GET", path) - if (resp and resp['RETURN_CODE'] == 200): - return resp['DATA']['vpc_pair_sn'] + if resp and resp["RETURN_CODE"] == 200: + return resp["DATA"]["vpc_pair_sn"] else: - return '' + return "" # Flatten the incoming config database and have the required fileds updated. # This modified config DB will be used while creating payloads. To avoid # messing up the incoming config make a copy of it. def dcnm_intf_copy_config(self): - if (None is self.config): + if None is self.config: return for cfg in self.config: - if(None is cfg.get('switch', None)): + if None is cfg.get("switch", None): continue - for sw in cfg['switch']: + for sw in cfg["switch"]: c = copy.deepcopy(cfg) # Add type of interface ckeys = list(cfg.keys()) for ck in ckeys: - if (ck.startswith('profile')): + if ck.startswith("profile"): - if ('type' not in cfg): - self.module.fail_json(msg=' element, which is mandatory is missing in config') + if "type" not in cfg: + self.module.fail_json( + msg=" element, which is mandatory is missing in config" + ) - pol_ind_str = cfg['type'] + '_' + cfg['profile']['mode'] + pol_ind_str = cfg["type"] + "_" + cfg["profile"]["mode"] - c[ck]['fabric'] = self.dcnm_intf_facts['fabric'] - if (cfg['type'] == 'vpc'): - c[ck]['sno'] = self.vpc_ip_sn[sw] + c[ck]["fabric"] = self.dcnm_intf_facts["fabric"] + if cfg["type"] == "vpc": + c[ck]["sno"] = self.vpc_ip_sn[sw] else: - c[ck]['sno'] = self.ip_sn[sw] - ifname, port_id = self.dcnm_intf_get_if_name(c['name'], c['type']) - c[ck]['ifname'] = ifname - c[ck]['policy'] = self.pol_types[self.dcnm_version][pol_ind_str] + c[ck]["sno"] = self.ip_sn[sw] + ifname, port_id = self.dcnm_intf_get_if_name( + c["name"], c["type"] + ) + c[ck]["ifname"] = ifname + c[ck]["policy"] = self.pol_types[self.dcnm_version][pol_ind_str] self.pb_input.append(c[ck]) def dcnm_intf_validate_interface_input(self, config, common_spec, prof_spec): @@ -1237,161 +1253,173 @@ def dcnm_intf_validate_interface_input(self, 
config, common_spec, prof_spec): intf_info, invalid_params = validate_list_of_dicts(config, common_spec) if invalid_params: - mesg = 'Invalid parameters in playbook: {}'.format("while processing interface " + config[0]['name'] + '\n' + '\n'.join(invalid_params)) + mesg = "Invalid parameters in playbook: {0}".format( + "while processing interface " + + config[0]["name"] + + "\n" + + "\n".join(invalid_params) + ) self.module.fail_json(msg=mesg) self.intf_info.extend(intf_info) - if (prof_spec is not None): + if prof_spec is not None: for item in intf_info: - plist.append(item['profile']) + plist.append(item["profile"]) intf_profile, invalid_params = validate_list_of_dicts(plist, prof_spec) # Merge the info from the intf_profile into the intf_info to have a single dict to be used for building # payloads - item['profile'].update(intf_profile[0]) + item["profile"].update(intf_profile[0]) - plist.remove(item['profile']) + plist.remove(item["profile"]) if invalid_params: - mesg = 'Invalid parameters in playbook: {}'.format("while processing interface " + config[0]['name'] + '\n' + '\n'.join(invalid_params)) + mesg = "Invalid parameters in playbook: {0}".format( + "while processing interface " + + config[0]["name"] + + "\n" + + "\n".join(invalid_params) + ) self.module.fail_json(msg=mesg) def dcnm_intf_validate_port_channel_input(self, config): pc_spec = dict( - name=dict(required=True, type='str'), - switch=dict(required=True, type='list'), - type=dict(required=True, type='str'), - deploy=dict(type='bool', default=True), - profile=dict(required=True, type='dict') + name=dict(required=True, type="str"), + switch=dict(required=True, type="list"), + type=dict(required=True, type="str"), + deploy=dict(type="bool", default=True), + profile=dict(required=True, type="dict"), ) pc_prof_spec_trunk = dict( - mode=dict(required=True, type='str'), - members=dict(type='list'), - pc_mode=dict(type='str', default='active'), - bpdu_guard=dict(type='str', default='true'), - 
port_type_fast=dict(type='bool', default=True), - mtu=dict(type='str', default='jumbo'), - allowed_vlans=dict(type='str', default='none'), - cmds=dict(type='list'), - description=dict(type='str', default=''), - admin_state=dict(type='bool', default=True) + mode=dict(required=True, type="str"), + members=dict(type="list"), + pc_mode=dict(type="str", default="active"), + bpdu_guard=dict(type="str", default="true"), + port_type_fast=dict(type="bool", default=True), + mtu=dict(type="str", default="jumbo"), + allowed_vlans=dict(type="str", default="none"), + cmds=dict(type="list"), + description=dict(type="str", default=""), + admin_state=dict(type="bool", default=True), ) pc_prof_spec_access = dict( - mode=dict(required=True, type='str'), - members=dict(type='list'), - pc_mode=dict(type='str', default='active'), - bpdu_guard=dict(type='str', default='true'), - port_type_fast=dict(type='bool', default=True), - mtu=dict(type='str', default='jumbo'), - access_vlan=dict(type='str', default=''), - cmds=dict(type='list'), - description=dict(type='str', default=''), - admin_state=dict(type='bool', default=True) + mode=dict(required=True, type="str"), + members=dict(type="list"), + pc_mode=dict(type="str", default="active"), + bpdu_guard=dict(type="str", default="true"), + port_type_fast=dict(type="bool", default=True), + mtu=dict(type="str", default="jumbo"), + access_vlan=dict(type="str", default=""), + cmds=dict(type="list"), + description=dict(type="str", default=""), + admin_state=dict(type="bool", default=True), ) pc_prof_spec_l3 = dict( - mode=dict(required=True, type='str'), - members=dict(type='list'), - pc_mode=dict(type='str', default='active'), - int_vrf=dict(type='str', default='default'), - ipv4_addr=dict(type='ipv4', default=''), - ipv4_mask_len=dict(type='int', default=8), - route_tag=dict(type='str', default=''), - mtu=dict(type='int', default=9216, range_min=576, range_max=9216), - cmds=dict(type='list'), - description=dict(type='str', default=''), - 
admin_state=dict(type='bool', default=True) + mode=dict(required=True, type="str"), + members=dict(type="list"), + pc_mode=dict(type="str", default="active"), + int_vrf=dict(type="str", default="default"), + ipv4_addr=dict(type="ipv4", default=""), + ipv4_mask_len=dict(type="int", default=8), + route_tag=dict(type="str", default=""), + mtu=dict(type="int", default=9216, range_min=576, range_max=9216), + cmds=dict(type="list"), + description=dict(type="str", default=""), + admin_state=dict(type="bool", default=True), ) - if ('trunk' == config[0]['profile']['mode']): + if "trunk" == config[0]["profile"]["mode"]: self.dcnm_intf_validate_interface_input(config, pc_spec, pc_prof_spec_trunk) - if ('access' == config[0]['profile']['mode']): - self.dcnm_intf_validate_interface_input(config, pc_spec, pc_prof_spec_access) - if ('l3' == config[0]['profile']['mode']): + if "access" == config[0]["profile"]["mode"]: + self.dcnm_intf_validate_interface_input( + config, pc_spec, pc_prof_spec_access + ) + if "l3" == config[0]["profile"]["mode"]: self.dcnm_intf_validate_interface_input(config, pc_spec, pc_prof_spec_l3) - if ('monitor' == config[0]['profile']['mode']): + if "monitor" == config[0]["profile"]["mode"]: self.dcnm_intf_validate_interface_input(config, pc_spec, None) def dcnm_intf_validate_virtual_port_channel_input(self, cfg): vpc_spec = dict( - name=dict(required=True, type='str'), - switch=dict(required=True, type='list'), - type=dict(required=True, type='str'), - deploy=dict(type='str', default=True), - profile=dict(required=True, type='dict') + name=dict(required=True, type="str"), + switch=dict(required=True, type="list"), + type=dict(required=True, type="str"), + deploy=dict(type="str", default=True), + profile=dict(required=True, type="dict"), ) vpc_prof_spec_trunk = dict( - mode=dict(required=True, type='str'), - peer1_pcid=dict(type='int', default=0, range_min=1, range_max=4096), - peer2_pcid=dict(type='int', default=0, range_min=1, range_max=4096), - 
peer1_members=dict(type='list'), - peer2_members=dict(type='list'), - pc_mode=dict(type='str', default='active'), - bpdu_guard=dict(type='str', default='true'), - port_type_fast=dict(type='bool', default=True), - mtu=dict(type='str', default='jumbo'), - peer1_allowed_vlans=dict(type='str', default='none'), - peer2_allowed_vlans=dict(type='str', default='none'), - peer1_cmds=dict(type='list'), - peer2_cmds=dict(type='list'), - peer1_description=dict(type='str', default=''), - peer2_description=dict(type='str', default=''), - admin_state=dict(type='bool', default=True) + mode=dict(required=True, type="str"), + peer1_pcid=dict(type="int", default=0, range_min=1, range_max=4096), + peer2_pcid=dict(type="int", default=0, range_min=1, range_max=4096), + peer1_members=dict(type="list"), + peer2_members=dict(type="list"), + pc_mode=dict(type="str", default="active"), + bpdu_guard=dict(type="str", default="true"), + port_type_fast=dict(type="bool", default=True), + mtu=dict(type="str", default="jumbo"), + peer1_allowed_vlans=dict(type="str", default="none"), + peer2_allowed_vlans=dict(type="str", default="none"), + peer1_cmds=dict(type="list"), + peer2_cmds=dict(type="list"), + peer1_description=dict(type="str", default=""), + peer2_description=dict(type="str", default=""), + admin_state=dict(type="bool", default=True), ) vpc_prof_spec_access = dict( - mode=dict(required=True, type='str'), - peer1_pcid=dict(type='int', default=0, range_min=1, range_max=4096), - peer2_pcid=dict(type='int', default=0, range_min=1, range_max=4096), - peer1_members=dict(type='list'), - peer2_members=dict(type='list'), - pc_mode=dict(type='str', default='active'), - bpdu_guard=dict(type='str', default='true'), - port_type_fast=dict(type='bool', default=True), - mtu=dict(type='str', default='jumbo'), - peer1_access_vlan=dict(type='str', default=''), - peer2_access_vlan=dict(type='str', default=''), - peer1_cmds=dict(type='list'), - peer2_cmds=dict(type='list'), - 
peer1_description=dict(type='str', default=''), - peer2_description=dict(type='str', default=''), - admin_state=dict(type='bool', default=True) + mode=dict(required=True, type="str"), + peer1_pcid=dict(type="int", default=0, range_min=1, range_max=4096), + peer2_pcid=dict(type="int", default=0, range_min=1, range_max=4096), + peer1_members=dict(type="list"), + peer2_members=dict(type="list"), + pc_mode=dict(type="str", default="active"), + bpdu_guard=dict(type="str", default="true"), + port_type_fast=dict(type="bool", default=True), + mtu=dict(type="str", default="jumbo"), + peer1_access_vlan=dict(type="str", default=""), + peer2_access_vlan=dict(type="str", default=""), + peer1_cmds=dict(type="list"), + peer2_cmds=dict(type="list"), + peer1_description=dict(type="str", default=""), + peer2_description=dict(type="str", default=""), + admin_state=dict(type="bool", default=True), ) - if ('trunk' == cfg[0]['profile']['mode']): + if "trunk" == cfg[0]["profile"]["mode"]: self.dcnm_intf_validate_interface_input(cfg, vpc_spec, vpc_prof_spec_trunk) - if ('access' == cfg[0]['profile']['mode']): + if "access" == cfg[0]["profile"]["mode"]: self.dcnm_intf_validate_interface_input(cfg, vpc_spec, vpc_prof_spec_access) def dcnm_intf_validate_sub_interface_input(self, cfg): sub_spec = dict( - name=dict(required=True, type='str'), - switch=dict(required=True, type='list'), - type=dict(required=True, type='str'), - deploy=dict(type='str', default=True), - profile=dict(required=True, type='dict'), + name=dict(required=True, type="str"), + switch=dict(required=True, type="list"), + type=dict(required=True, type="str"), + deploy=dict(type="str", default=True), + profile=dict(required=True, type="dict"), ) sub_prof_spec = dict( - mode=dict(required=True, type='str'), - vlan=dict(required=True, type='int', range_min=2, range_max=3967), - ipv4_addr=dict(required=True, type='ipv4'), - ipv4_mask_len=dict(required=True, type='int', range_min=8, range_max=31), - int_vrf=dict(type='str', 
default='default'), - ipv6_addr=dict(type='ipv6', default=''), - ipv6_mask_len=dict(type='int', range_min=64, range_max=127, default=64), - mtu=dict(type='int', range_min=576, range_max=9216, default=9216), - cmds=dict(type='list'), - description=dict(type='str', default=''), - admin_state=dict(type='bool', default=True) + mode=dict(required=True, type="str"), + vlan=dict(required=True, type="int", range_min=2, range_max=3967), + ipv4_addr=dict(required=True, type="ipv4"), + ipv4_mask_len=dict(required=True, type="int", range_min=8, range_max=31), + int_vrf=dict(type="str", default="default"), + ipv6_addr=dict(type="ipv6", default=""), + ipv6_mask_len=dict(type="int", range_min=64, range_max=127, default=64), + mtu=dict(type="int", range_min=576, range_max=9216, default=9216), + cmds=dict(type="list"), + description=dict(type="str", default=""), + admin_state=dict(type="bool", default=True), ) self.dcnm_intf_validate_interface_input(cfg, sub_spec, sub_prof_spec) @@ -1399,22 +1427,22 @@ def dcnm_intf_validate_sub_interface_input(self, cfg): def dcnm_intf_validate_loopback_interface_input(self, cfg): lo_spec = dict( - name=dict(required=True, type='str'), - switch=dict(required=True, type='list'), - type=dict(required=True, type='str'), - deploy=dict(type='str', default=True), - profile=dict(required=True, type='dict'), + name=dict(required=True, type="str"), + switch=dict(required=True, type="list"), + type=dict(required=True, type="str"), + deploy=dict(type="str", default=True), + profile=dict(required=True, type="dict"), ) lo_prof_spec = dict( - mode=dict(required=True, type='str'), - ipv4_addr=dict(required=True, type='ipv4'), - int_vrf=dict(type='str', default='default'), - ipv6_addr=dict(type='ipv6', default=''), - route_tag=dict(type='str', default=''), - cmds=dict(type='list'), - description=dict(type='str', default=''), - admin_state=dict(type='bool', default=True) + mode=dict(required=True, type="str"), + ipv4_addr=dict(required=True, type="ipv4"), + 
int_vrf=dict(type="str", default="default"), + ipv6_addr=dict(type="ipv6", default=""), + route_tag=dict(type="str", default=""), + cmds=dict(type="list"), + description=dict(type="str", default=""), + admin_state=dict(type="bool", default=True), ) self.dcnm_intf_validate_interface_input(cfg, lo_spec, lo_prof_spec) @@ -1422,79 +1450,83 @@ def dcnm_intf_validate_loopback_interface_input(self, cfg): def dcnm_intf_validate_ethernet_interface_input(self, cfg): eth_spec = dict( - name=dict(required=True, type='str'), - switch=dict(required=True, type='list'), - type=dict(required=True, type='str'), - deploy=dict(type='str', default=True), - profile=dict(required=True, type='dict'), + name=dict(required=True, type="str"), + switch=dict(required=True, type="list"), + type=dict(required=True, type="str"), + deploy=dict(type="str", default=True), + profile=dict(required=True, type="dict"), ) eth_prof_spec_trunk = dict( - mode=dict(required=True, type='str'), - bpdu_guard=dict(type='str', default='true'), - port_type_fast=dict(type='bool', default=True), - mtu=dict(type='str', default='jumbo'), - speed=dict(type='str', default="Auto"), - allowed_vlans=dict(type='str', default='none'), - cmds=dict(type='list'), - description=dict(type='str', default=''), - admin_state=dict(type='bool', default=True) + mode=dict(required=True, type="str"), + bpdu_guard=dict(type="str", default="true"), + port_type_fast=dict(type="bool", default=True), + mtu=dict(type="str", default="jumbo"), + speed=dict(type="str", default="Auto"), + allowed_vlans=dict(type="str", default="none"), + cmds=dict(type="list"), + description=dict(type="str", default=""), + admin_state=dict(type="bool", default=True), ) eth_prof_spec_access = dict( - mode=dict(required=True, type='str'), - bpdu_guard=dict(type='str', default='true'), - port_type_fast=dict(type='bool', default=True), - mtu=dict(type='str', default='jumbo'), - speed=dict(type='str', default="Auto"), - access_vlan=dict(type='str', default=''), - 
cmds=dict(type='list'), - description=dict(type='str', default=''), - admin_state=dict(type='bool', default=True) + mode=dict(required=True, type="str"), + bpdu_guard=dict(type="str", default="true"), + port_type_fast=dict(type="bool", default=True), + mtu=dict(type="str", default="jumbo"), + speed=dict(type="str", default="Auto"), + access_vlan=dict(type="str", default=""), + cmds=dict(type="list"), + description=dict(type="str", default=""), + admin_state=dict(type="bool", default=True), ) eth_prof_spec_routed_host = dict( - int_vrf=dict(type='str', default='default'), - ipv4_addr=dict(type='ipv4', default=''), - ipv4_mask_len=dict(type='int', default=8), - route_tag=dict(type='str', default=''), - mtu=dict(type='int', default=9216, range_min=576, range_max=9216), - speed=dict(type='str', default="Auto"), - cmds=dict(type='list'), - description=dict(type='str', default=''), - admin_state=dict(type='bool', default=True) + int_vrf=dict(type="str", default="default"), + ipv4_addr=dict(type="ipv4", default=""), + ipv4_mask_len=dict(type="int", default=8), + route_tag=dict(type="str", default=""), + mtu=dict(type="int", default=9216, range_min=576, range_max=9216), + speed=dict(type="str", default="Auto"), + cmds=dict(type="list"), + description=dict(type="str", default=""), + admin_state=dict(type="bool", default=True), ) eth_prof_spec_epl_routed_host = dict( - mode=dict(required=True, type='str'), - ipv4_addr=dict(required=True, type='ipv4'), - ipv4_mask_len=dict(type='int', default=8), - ipv6_addr=dict(type='ipv6', default=''), - ipv6_mask_len=dict(type='int', range_min=64, range_max=127, default=64), - route_tag=dict(type='str', default=''), - mtu=dict(type='int', default=1500, range_max=9216), - speed=dict(type='str', default="Auto"), - cmds=dict(type='list'), - description=dict(type='str', default=''), - admin_state=dict(type='bool', default=True) + mode=dict(required=True, type="str"), + ipv4_addr=dict(required=True, type="ipv4"), + 
ipv4_mask_len=dict(type="int", default=8), + ipv6_addr=dict(type="ipv6", default=""), + ipv6_mask_len=dict(type="int", range_min=64, range_max=127, default=64), + route_tag=dict(type="str", default=""), + mtu=dict(type="int", default=1500, range_max=9216), + speed=dict(type="str", default="Auto"), + cmds=dict(type="list"), + description=dict(type="str", default=""), + admin_state=dict(type="bool", default=True), ) - if ('trunk' == cfg[0]['profile']['mode']): + if "trunk" == cfg[0]["profile"]["mode"]: self.dcnm_intf_validate_interface_input(cfg, eth_spec, eth_prof_spec_trunk) - if ('access' == cfg[0]['profile']['mode']): + if "access" == cfg[0]["profile"]["mode"]: self.dcnm_intf_validate_interface_input(cfg, eth_spec, eth_prof_spec_access) - if ('routed' == cfg[0]['profile']['mode']): - self.dcnm_intf_validate_interface_input(cfg, eth_spec, eth_prof_spec_routed_host) - if ('monitor' == cfg[0]['profile']['mode']): + if "routed" == cfg[0]["profile"]["mode"]: + self.dcnm_intf_validate_interface_input( + cfg, eth_spec, eth_prof_spec_routed_host + ) + if "monitor" == cfg[0]["profile"]["mode"]: self.dcnm_intf_validate_interface_input(cfg, eth_spec, None) - if ('epl_routed' == cfg[0]['profile']['mode']): - self.dcnm_intf_validate_interface_input(cfg, eth_spec, eth_prof_spec_epl_routed_host) + if "epl_routed" == cfg[0]["profile"]["mode"]: + self.dcnm_intf_validate_interface_input( + cfg, eth_spec, eth_prof_spec_epl_routed_host + ) def dcnm_intf_validate_delete_state_input(self, cfg): del_spec = dict( - name=dict(required=False, type='str'), - switch=dict(required=False, type='list'), + name=dict(required=False, type="str"), + switch=dict(required=False, type="list"), ) self.dcnm_intf_validate_interface_input(cfg, del_spec, None) @@ -1502,8 +1534,8 @@ def dcnm_intf_validate_delete_state_input(self, cfg): def dcnm_intf_validate_query_state_input(self, cfg): query_spec = dict( - name=dict(type='str', default=''), - switch=dict(required=True, type='list'), + 
name=dict(type="str", default=""), + switch=dict(required=True, type="list"), ) self.dcnm_intf_validate_interface_input(cfg, query_spec, None) @@ -1511,8 +1543,8 @@ def dcnm_intf_validate_query_state_input(self, cfg): def dcnm_intf_validate_overridden_state_input(self, cfg): overridden_spec = dict( - name=dict(required=False, type='str', default=''), - switch=dict(required=False, type='list'), + name=dict(required=False, type="str", default=""), + switch=dict(required=False, type="list"), ) self.dcnm_intf_validate_interface_input(cfg, overridden_spec, None) @@ -1521,7 +1553,7 @@ def dcnm_intf_validate_overridden_state_input(self, cfg): def dcnm_intf_validate_input(self): """Parse the playbook values, validate to param specs.""" - if (None is self.config): + if None is self.config: return # Inputs will vary for each type of interface and for each state. Make specific checks @@ -1534,32 +1566,37 @@ def dcnm_intf_validate_input(self): cfg.append(citem) - if (self.module.params['state'] == 'deleted'): + if self.module.params["state"] == "deleted": # config for delete state is different for all interafces. It may not have the profile # construct. So validate deleted state differently self.dcnm_intf_validate_delete_state_input(cfg) - elif (self.module.params['state'] == 'query'): + elif self.module.params["state"] == "query": # config for query state is different for all interafces. It may not have the profile # construct. So validate query state differently self.dcnm_intf_validate_query_state_input(cfg) - elif ((self.module.params['state'] == 'overridden') and not (any('profile' in key for key in item))): + elif (self.module.params["state"] == "overridden") and not ( + any("profile" in key for key in item) + ): # config for overridden state is different for all interafces. It may not have the profile # construct. 
So validate overridden state differently self.dcnm_intf_validate_overridden_state_input(cfg) else: - if ('type' not in item): - mesg = 'Invalid parameters in playbook: {}'.format("while processing interface " + item['name'] + '\n' 'mandatory object "type" missing') + if "type" not in item: + mesg = "Invalid parameters in playbook: {0}".format( + "while processing interface " + item["name"] + "\n" + 'mandatory object "type" missing' + ) self.module.fail_json(msg=mesg) - if (item['type'] == 'pc'): + if item["type"] == "pc": self.dcnm_intf_validate_port_channel_input(cfg) - if (item['type'] == 'vpc'): + if item["type"] == "vpc": self.dcnm_intf_validate_virtual_port_channel_input(cfg) - if (item['type'] == 'sub_int'): + if item["type"] == "sub_int": self.dcnm_intf_validate_sub_interface_input(cfg) - if (item['type'] == 'lo'): + if item["type"] == "lo": self.dcnm_intf_validate_loopback_interface_input(cfg) - if (item['type'] == 'eth'): + if item["type"] == "eth": self.dcnm_intf_validate_ethernet_interface_input(cfg) cfg.remove(citem) @@ -1567,231 +1604,333 @@ def dcnm_intf_get_pc_payload(self, delem, intf, profile): # Extract port id from the given name, which is of the form 'po300' - ifname, port_id = self.dcnm_intf_get_if_name(delem['name'], delem['type']) + ifname, port_id = self.dcnm_intf_get_if_name(delem["name"], delem["type"]) intf["interfaces"][0].update({"ifName": ifname}) - if (delem[profile]['mode'] == 'trunk'): - if (delem[profile]['members'] is None): + if delem[profile]["mode"] == "trunk": + if delem[profile]["members"] is None: intf["interfaces"][0]["nvPairs"]["MEMBER_INTERFACES"] = "" else: - intf["interfaces"][0]["nvPairs"]["MEMBER_INTERFACES"] = ",".join(delem[profile]['members']) - intf["interfaces"][0]["nvPairs"]["PC_MODE"] = delem[profile]['pc_mode'] - intf["interfaces"][0]["nvPairs"]["BPDUGUARD_ENABLED"] = delem[profile]['bpdu_guard'].lower() - intf["interfaces"][0]["nvPairs"]["PORTTYPE_FAST_ENABLED"] = str(delem[profile]['port_type_fast']).lower() 
- intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]['mtu']) - intf["interfaces"][0]["nvPairs"]["ALLOWED_VLANS"] = delem[profile]['allowed_vlans'] + intf["interfaces"][0]["nvPairs"]["MEMBER_INTERFACES"] = ",".join( + delem[profile]["members"] + ) + intf["interfaces"][0]["nvPairs"]["PC_MODE"] = delem[profile]["pc_mode"] + intf["interfaces"][0]["nvPairs"]["BPDUGUARD_ENABLED"] = delem[profile][ + "bpdu_guard" + ].lower() + intf["interfaces"][0]["nvPairs"]["PORTTYPE_FAST_ENABLED"] = str( + delem[profile]["port_type_fast"] + ).lower() + intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]["mtu"]) + intf["interfaces"][0]["nvPairs"]["ALLOWED_VLANS"] = delem[profile][ + "allowed_vlans" + ] intf["interfaces"][0]["nvPairs"]["PO_ID"] = ifname - if (delem[profile]['mode'] == 'access'): - if (delem[profile]['members'] is None): + if delem[profile]["mode"] == "access": + if delem[profile]["members"] is None: intf["interfaces"][0]["nvPairs"]["MEMBER_INTERFACES"] = "" else: - intf["interfaces"][0]["nvPairs"]["MEMBER_INTERFACES"] = ",".join(delem[profile]['members']) - intf["interfaces"][0]["nvPairs"]["PC_MODE"] = delem[profile]['pc_mode'] - intf["interfaces"][0]["nvPairs"]["BPDUGUARD_ENABLED"] = delem[profile]['bpdu_guard'].lower() - intf["interfaces"][0]["nvPairs"]["PORTTYPE_FAST_ENABLED"] = str(delem[profile]['port_type_fast']).lower() - intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]['mtu']) - intf["interfaces"][0]["nvPairs"]["ACCESS_VLAN"] = delem[profile]['access_vlan'] + intf["interfaces"][0]["nvPairs"]["MEMBER_INTERFACES"] = ",".join( + delem[profile]["members"] + ) + intf["interfaces"][0]["nvPairs"]["PC_MODE"] = delem[profile]["pc_mode"] + intf["interfaces"][0]["nvPairs"]["BPDUGUARD_ENABLED"] = delem[profile][ + "bpdu_guard" + ].lower() + intf["interfaces"][0]["nvPairs"]["PORTTYPE_FAST_ENABLED"] = str( + delem[profile]["port_type_fast"] + ).lower() + intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]["mtu"]) + 
intf["interfaces"][0]["nvPairs"]["ACCESS_VLAN"] = delem[profile][ + "access_vlan" + ] intf["interfaces"][0]["nvPairs"]["PO_ID"] = ifname - if (delem[profile]['mode'] == 'l3'): - if (delem[profile]['members'] is None): + if delem[profile]["mode"] == "l3": + if delem[profile]["members"] is None: intf["interfaces"][0]["nvPairs"]["MEMBER_INTERFACES"] = "" else: - intf["interfaces"][0]["nvPairs"]["MEMBER_INTERFACES"] = ",".join(delem[profile]['members']) - intf["interfaces"][0]["nvPairs"]["PC_MODE"] = delem[profile]['pc_mode'] - intf["interfaces"][0]["nvPairs"]["INTF_VRF"] = delem[profile]['int_vrf'] - intf["interfaces"][0]["nvPairs"]["IP"] = str(delem[profile]['ipv4_addr']) - if (delem[profile]['ipv4_addr'] != ''): - intf["interfaces"][0]["nvPairs"]["PREFIX"] = str(delem[profile]['ipv4_mask_len']) + intf["interfaces"][0]["nvPairs"]["MEMBER_INTERFACES"] = ",".join( + delem[profile]["members"] + ) + intf["interfaces"][0]["nvPairs"]["PC_MODE"] = delem[profile]["pc_mode"] + intf["interfaces"][0]["nvPairs"]["INTF_VRF"] = delem[profile]["int_vrf"] + intf["interfaces"][0]["nvPairs"]["IP"] = str(delem[profile]["ipv4_addr"]) + if delem[profile]["ipv4_addr"] != "": + intf["interfaces"][0]["nvPairs"]["PREFIX"] = str( + delem[profile]["ipv4_mask_len"] + ) else: - intf["interfaces"][0]["nvPairs"]["PREFIX"] = '' - intf["interfaces"][0]["nvPairs"]["ROUTING_TAG"] = delem[profile]['route_tag'] + intf["interfaces"][0]["nvPairs"]["PREFIX"] = "" + intf["interfaces"][0]["nvPairs"]["ROUTING_TAG"] = delem[profile][ + "route_tag" + ] intf["interfaces"][0]["nvPairs"]["PO_ID"] = ifname - intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]['mtu']) - if (delem[profile]['mode'] == 'monitor'): + intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]["mtu"]) + if delem[profile]["mode"] == "monitor": intf["interfaces"][0]["nvPairs"]["INTF_NAME"] = ifname - if (delem[profile]['mode'] != 'monitor'): - intf["interfaces"][0]["nvPairs"]["DESC"] = delem[profile]['description'] - if 
(delem[profile]['cmds'] is None): + if delem[profile]["mode"] != "monitor": + intf["interfaces"][0]["nvPairs"]["DESC"] = delem[profile]["description"] + if delem[profile]["cmds"] is None: intf["interfaces"][0]["nvPairs"]["CONF"] = "" else: - intf["interfaces"][0]["nvPairs"]["CONF"] = "\n".join(delem[profile]['cmds']) - intf["interfaces"][0]["nvPairs"]["ADMIN_STATE"] = str(delem[profile]['admin_state']).lower() + intf["interfaces"][0]["nvPairs"]["CONF"] = "\n".join( + delem[profile]["cmds"] + ) + intf["interfaces"][0]["nvPairs"]["ADMIN_STATE"] = str( + delem[profile]["admin_state"] + ).lower() def dcnm_intf_get_vpc_payload(self, delem, intf, profile): # Extract port id from the given name, which is of the form 'vpc300' - ifname, port_id = self.dcnm_intf_get_if_name(delem['name'], delem['type']) + ifname, port_id = self.dcnm_intf_get_if_name(delem["name"], delem["type"]) intf["interfaces"][0].update({"ifName": ifname}) - if (delem[profile]['mode'] == 'trunk'): + if delem[profile]["mode"] == "trunk": - if (delem[profile]['peer1_members'] is None): + if delem[profile]["peer1_members"] is None: intf["interfaces"][0]["nvPairs"]["PEER1_MEMBER_INTERFACES"] = "" else: - intf["interfaces"][0]["nvPairs"]["PEER1_MEMBER_INTERFACES"] = ",".join(delem[profile]['peer1_members']) + intf["interfaces"][0]["nvPairs"]["PEER1_MEMBER_INTERFACES"] = ",".join( + delem[profile]["peer1_members"] + ) - if (delem[profile]['peer2_members'] is None): + if delem[profile]["peer2_members"] is None: intf["interfaces"][0]["nvPairs"]["PEER2_MEMBER_INTERFACES"] = "" else: - intf["interfaces"][0]["nvPairs"]["PEER2_MEMBER_INTERFACES"] = ",".join(delem[profile]['peer2_members']) - - intf["interfaces"][0]["nvPairs"]["PC_MODE"] = delem[profile]['pc_mode'] - intf["interfaces"][0]["nvPairs"]["BPDUGUARD_ENABLED"] = delem[profile]['bpdu_guard'].lower() - intf["interfaces"][0]["nvPairs"]["PORTTYPE_FAST_ENABLED"] = str(delem[profile]['port_type_fast']).lower() - intf["interfaces"][0]["nvPairs"]["MTU"] = 
str(delem[profile]['mtu']) - intf["interfaces"][0]["nvPairs"]["PEER1_ALLOWED_VLANS"] = delem[profile]['peer1_allowed_vlans'] - intf["interfaces"][0]["nvPairs"]["PEER2_ALLOWED_VLANS"] = delem[profile]['peer2_allowed_vlans'] - - if (delem[profile]["peer1_pcid"] == 0): + intf["interfaces"][0]["nvPairs"]["PEER2_MEMBER_INTERFACES"] = ",".join( + delem[profile]["peer2_members"] + ) + + intf["interfaces"][0]["nvPairs"]["PC_MODE"] = delem[profile]["pc_mode"] + intf["interfaces"][0]["nvPairs"]["BPDUGUARD_ENABLED"] = delem[profile][ + "bpdu_guard" + ].lower() + intf["interfaces"][0]["nvPairs"]["PORTTYPE_FAST_ENABLED"] = str( + delem[profile]["port_type_fast"] + ).lower() + intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]["mtu"]) + intf["interfaces"][0]["nvPairs"]["PEER1_ALLOWED_VLANS"] = delem[profile][ + "peer1_allowed_vlans" + ] + intf["interfaces"][0]["nvPairs"]["PEER2_ALLOWED_VLANS"] = delem[profile][ + "peer2_allowed_vlans" + ] + + if delem[profile]["peer1_pcid"] == 0: intf["interfaces"][0]["nvPairs"]["PEER1_PCID"] = str(port_id) else: - intf["interfaces"][0]["nvPairs"]["PEER1_PCID"] = str(delem[profile]["peer1_pcid"]) + intf["interfaces"][0]["nvPairs"]["PEER1_PCID"] = str( + delem[profile]["peer1_pcid"] + ) - if (delem[profile]["peer2_pcid"] == 0): + if delem[profile]["peer2_pcid"] == 0: intf["interfaces"][0]["nvPairs"]["PEER2_PCID"] = str(port_id) else: - intf["interfaces"][0]["nvPairs"]["PEER2_PCID"] = str(delem[profile]["peer2_pcid"]) + intf["interfaces"][0]["nvPairs"]["PEER2_PCID"] = str( + delem[profile]["peer2_pcid"] + ) - if (delem[profile]['mode'] == 'access'): + if delem[profile]["mode"] == "access": - if (delem[profile]['peer1_members'] is None): + if delem[profile]["peer1_members"] is None: intf["interfaces"][0]["nvPairs"]["PEER1_MEMBER_INTERFACES"] = "" else: - intf["interfaces"][0]["nvPairs"]["PEER1_MEMBER_INTERFACES"] = ",".join(delem[profile]['peer1_members']) + intf["interfaces"][0]["nvPairs"]["PEER1_MEMBER_INTERFACES"] = ",".join( + 
delem[profile]["peer1_members"] + ) - if (delem[profile]['peer2_members'] is None): + if delem[profile]["peer2_members"] is None: intf["interfaces"][0]["nvPairs"]["PEER2_MEMBER_INTERFACES"] = "" else: - intf["interfaces"][0]["nvPairs"]["PEER2_MEMBER_INTERFACES"] = ",".join(delem[profile]['peer2_members']) - - intf["interfaces"][0]["nvPairs"]["PC_MODE"] = delem[profile]['pc_mode'] - intf["interfaces"][0]["nvPairs"]["BPDUGUARD_ENABLED"] = delem[profile]['bpdu_guard'].lower() - intf["interfaces"][0]["nvPairs"]["PORTTYPE_FAST_ENABLED"] = str(delem[profile]['port_type_fast']).lower() - intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]['mtu']) - intf["interfaces"][0]["nvPairs"]["PEER1_ACCESS_VLAN"] = delem[profile]['peer1_access_vlan'] - intf["interfaces"][0]["nvPairs"]["PEER2_ACCESS_VLAN"] = delem[profile]['peer2_access_vlan'] - - if (delem[profile]["peer1_pcid"] == 0): + intf["interfaces"][0]["nvPairs"]["PEER2_MEMBER_INTERFACES"] = ",".join( + delem[profile]["peer2_members"] + ) + + intf["interfaces"][0]["nvPairs"]["PC_MODE"] = delem[profile]["pc_mode"] + intf["interfaces"][0]["nvPairs"]["BPDUGUARD_ENABLED"] = delem[profile][ + "bpdu_guard" + ].lower() + intf["interfaces"][0]["nvPairs"]["PORTTYPE_FAST_ENABLED"] = str( + delem[profile]["port_type_fast"] + ).lower() + intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]["mtu"]) + intf["interfaces"][0]["nvPairs"]["PEER1_ACCESS_VLAN"] = delem[profile][ + "peer1_access_vlan" + ] + intf["interfaces"][0]["nvPairs"]["PEER2_ACCESS_VLAN"] = delem[profile][ + "peer2_access_vlan" + ] + + if delem[profile]["peer1_pcid"] == 0: intf["interfaces"][0]["nvPairs"]["PEER1_PCID"] = str(port_id) else: - intf["interfaces"][0]["nvPairs"]["PEER1_PCID"] = str(delem[profile]["peer1_pcid"]) + intf["interfaces"][0]["nvPairs"]["PEER1_PCID"] = str( + delem[profile]["peer1_pcid"] + ) - if (delem[profile]["peer2_pcid"] == 0): + if delem[profile]["peer2_pcid"] == 0: intf["interfaces"][0]["nvPairs"]["PEER2_PCID"] = str(port_id) else: 
- intf["interfaces"][0]["nvPairs"]["PEER2_PCID"] = str(delem[profile]["peer2_pcid"]) - - intf["interfaces"][0]["nvPairs"]["PEER1_PO_DESC"] = delem[profile]['peer1_description'] - intf["interfaces"][0]["nvPairs"]["PEER2_PO_DESC"] = delem[profile]['peer2_description'] - if (delem[profile]['peer1_cmds'] is None): + intf["interfaces"][0]["nvPairs"]["PEER2_PCID"] = str( + delem[profile]["peer2_pcid"] + ) + + intf["interfaces"][0]["nvPairs"]["PEER1_PO_DESC"] = delem[profile][ + "peer1_description" + ] + intf["interfaces"][0]["nvPairs"]["PEER2_PO_DESC"] = delem[profile][ + "peer2_description" + ] + if delem[profile]["peer1_cmds"] is None: intf["interfaces"][0]["nvPairs"]["PEER1_PO_CONF"] = "" else: - intf["interfaces"][0]["nvPairs"]["PEER1_PO_CONF"] = "\n".join(delem[profile]['peer1_cmds']) - if (delem[profile]['peer2_cmds'] is None): + intf["interfaces"][0]["nvPairs"]["PEER1_PO_CONF"] = "\n".join( + delem[profile]["peer1_cmds"] + ) + if delem[profile]["peer2_cmds"] is None: intf["interfaces"][0]["nvPairs"]["PEER2_PO_CONF"] = "" else: - intf["interfaces"][0]["nvPairs"]["PEER2_PO_CONF"] = "\n".join(delem[profile]['peer2_cmds']) - intf["interfaces"][0]["nvPairs"]["ADMIN_STATE"] = str(delem[profile]['admin_state']).lower() + intf["interfaces"][0]["nvPairs"]["PEER2_PO_CONF"] = "\n".join( + delem[profile]["peer2_cmds"] + ) + intf["interfaces"][0]["nvPairs"]["ADMIN_STATE"] = str( + delem[profile]["admin_state"] + ).lower() intf["interfaces"][0]["nvPairs"]["INTF_NAME"] = ifname def dcnm_intf_get_sub_intf_payload(self, delem, intf, profile): # Extract port id from the given name, which is of the form 'po300' - ifname, port_id = self.dcnm_intf_get_if_name(delem['name'], delem['type']) + ifname, port_id = self.dcnm_intf_get_if_name(delem["name"], delem["type"]) intf["interfaces"][0].update({"ifName": ifname}) - intf["interfaces"][0]["nvPairs"]["VLAN"] = str(delem[profile]['vlan']) - intf["interfaces"][0]["nvPairs"]["INTF_VRF"] = delem[profile]['int_vrf'] - 
intf["interfaces"][0]["nvPairs"]["IP"] = str(delem[profile]['ipv4_addr']) - intf["interfaces"][0]["nvPairs"]["PREFIX"] = str(delem[profile]['ipv4_mask_len']) - if (delem[profile]['ipv6_addr']): - intf["interfaces"][0]["nvPairs"]["IPv6"] = str(delem[profile]['ipv6_addr']) - intf["interfaces"][0]["nvPairs"]["IPv6_PREFIX"] = str(delem[profile]['ipv6_mask_len']) + intf["interfaces"][0]["nvPairs"]["VLAN"] = str(delem[profile]["vlan"]) + intf["interfaces"][0]["nvPairs"]["INTF_VRF"] = delem[profile]["int_vrf"] + intf["interfaces"][0]["nvPairs"]["IP"] = str(delem[profile]["ipv4_addr"]) + intf["interfaces"][0]["nvPairs"]["PREFIX"] = str( + delem[profile]["ipv4_mask_len"] + ) + if delem[profile]["ipv6_addr"]: + intf["interfaces"][0]["nvPairs"]["IPv6"] = str(delem[profile]["ipv6_addr"]) + intf["interfaces"][0]["nvPairs"]["IPv6_PREFIX"] = str( + delem[profile]["ipv6_mask_len"] + ) else: intf["interfaces"][0]["nvPairs"]["IPv6"] = "" intf["interfaces"][0]["nvPairs"]["IPv6_PREFIX"] = "" - intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]['mtu']) + intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]["mtu"]) intf["interfaces"][0]["nvPairs"]["INTF_NAME"] = ifname - intf["interfaces"][0]["nvPairs"]["DESC"] = delem[profile]['description'] - if (delem[profile]['cmds'] is None): + intf["interfaces"][0]["nvPairs"]["DESC"] = delem[profile]["description"] + if delem[profile]["cmds"] is None: intf["interfaces"][0]["nvPairs"]["CONF"] = "" else: - intf["interfaces"][0]["nvPairs"]["CONF"] = "\n".join(delem[profile]['cmds']) - intf["interfaces"][0]["nvPairs"]["ADMIN_STATE"] = str(delem[profile]['admin_state']).lower() + intf["interfaces"][0]["nvPairs"]["CONF"] = "\n".join(delem[profile]["cmds"]) + intf["interfaces"][0]["nvPairs"]["ADMIN_STATE"] = str( + delem[profile]["admin_state"] + ).lower() def dcnm_intf_get_loopback_payload(self, delem, intf, profile): # Extract port id from the given name, which is of the form 'po300' - ifname, port_id = 
self.dcnm_intf_get_if_name(delem['name'], delem['type']) + ifname, port_id = self.dcnm_intf_get_if_name(delem["name"], delem["type"]) intf["interfaces"][0].update({"ifName": ifname}) - intf["interfaces"][0]["nvPairs"]["INTF_VRF"] = delem[profile]['int_vrf'] - intf["interfaces"][0]["nvPairs"]["IP"] = str(delem[profile]['ipv4_addr']) - intf["interfaces"][0]["nvPairs"]["V6IP"] = str(delem[profile]['ipv6_addr']) - intf["interfaces"][0]["nvPairs"]["ROUTE_MAP_TAG"] = delem[profile]['route_tag'] + intf["interfaces"][0]["nvPairs"]["INTF_VRF"] = delem[profile]["int_vrf"] + intf["interfaces"][0]["nvPairs"]["IP"] = str(delem[profile]["ipv4_addr"]) + intf["interfaces"][0]["nvPairs"]["V6IP"] = str(delem[profile]["ipv6_addr"]) + intf["interfaces"][0]["nvPairs"]["ROUTE_MAP_TAG"] = delem[profile]["route_tag"] intf["interfaces"][0]["nvPairs"]["INTF_NAME"] = ifname - intf["interfaces"][0]["nvPairs"]["DESC"] = delem[profile]['description'] - if (delem[profile]['cmds'] is None): + intf["interfaces"][0]["nvPairs"]["DESC"] = delem[profile]["description"] + if delem[profile]["cmds"] is None: intf["interfaces"][0]["nvPairs"]["CONF"] = "" else: - intf["interfaces"][0]["nvPairs"]["CONF"] = "\n".join(delem[profile]['cmds']) - intf["interfaces"][0]["nvPairs"]["ADMIN_STATE"] = str(delem[profile]['admin_state']).lower() + intf["interfaces"][0]["nvPairs"]["CONF"] = "\n".join(delem[profile]["cmds"]) + intf["interfaces"][0]["nvPairs"]["ADMIN_STATE"] = str( + delem[profile]["admin_state"] + ).lower() def dcnm_intf_get_eth_payload(self, delem, intf, profile): # Extract port id from the given name, which is of the form 'po300' - ifname, port_id = self.dcnm_intf_get_if_name(delem['name'], delem['type']) + ifname, port_id = self.dcnm_intf_get_if_name(delem["name"], delem["type"]) intf["interfaces"][0].update({"ifName": ifname}) - if (delem[profile]['mode'] == 'trunk'): - intf["interfaces"][0]["nvPairs"]["BPDUGUARD_ENABLED"] = delem[profile]['bpdu_guard'].lower() - 
intf["interfaces"][0]["nvPairs"]["PORTTYPE_FAST_ENABLED"] = str(delem[profile]['port_type_fast']).lower() - intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]['mtu']) - intf["interfaces"][0]["nvPairs"]["SPEED"] = str(delem[profile]['speed']) - intf["interfaces"][0]["nvPairs"]["ALLOWED_VLANS"] = delem[profile]['allowed_vlans'] + if delem[profile]["mode"] == "trunk": + intf["interfaces"][0]["nvPairs"]["BPDUGUARD_ENABLED"] = delem[profile][ + "bpdu_guard" + ].lower() + intf["interfaces"][0]["nvPairs"]["PORTTYPE_FAST_ENABLED"] = str( + delem[profile]["port_type_fast"] + ).lower() + intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]["mtu"]) + intf["interfaces"][0]["nvPairs"]["SPEED"] = str(delem[profile]["speed"]) + intf["interfaces"][0]["nvPairs"]["ALLOWED_VLANS"] = delem[profile][ + "allowed_vlans" + ] intf["interfaces"][0]["nvPairs"]["INTF_NAME"] = ifname - if (delem[profile]['mode'] == 'access'): - intf["interfaces"][0]["nvPairs"]["BPDUGUARD_ENABLED"] = delem[profile]['bpdu_guard'].lower() - intf["interfaces"][0]["nvPairs"]["PORTTYPE_FAST_ENABLED"] = str(delem[profile]['port_type_fast']).lower() - intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]['mtu']) - intf["interfaces"][0]["nvPairs"]["SPEED"] = str(delem[profile]['speed']) - intf["interfaces"][0]["nvPairs"]["ACCESS_VLAN"] = delem[profile]['access_vlan'] + if delem[profile]["mode"] == "access": + intf["interfaces"][0]["nvPairs"]["BPDUGUARD_ENABLED"] = delem[profile][ + "bpdu_guard" + ].lower() + intf["interfaces"][0]["nvPairs"]["PORTTYPE_FAST_ENABLED"] = str( + delem[profile]["port_type_fast"] + ).lower() + intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]["mtu"]) + intf["interfaces"][0]["nvPairs"]["SPEED"] = str(delem[profile]["speed"]) + intf["interfaces"][0]["nvPairs"]["ACCESS_VLAN"] = delem[profile][ + "access_vlan" + ] intf["interfaces"][0]["nvPairs"]["INTF_NAME"] = ifname - if (delem[profile]['mode'] == 'routed'): - intf["interfaces"][0]["nvPairs"]["INTF_VRF"] = 
delem[profile]['int_vrf'] - intf["interfaces"][0]["nvPairs"]["IP"] = str(delem[profile]['ipv4_addr']) - if (delem[profile]['ipv4_addr'] != ''): - intf["interfaces"][0]["nvPairs"]["PREFIX"] = str(delem[profile]['ipv4_mask_len']) + if delem[profile]["mode"] == "routed": + intf["interfaces"][0]["nvPairs"]["INTF_VRF"] = delem[profile]["int_vrf"] + intf["interfaces"][0]["nvPairs"]["IP"] = str(delem[profile]["ipv4_addr"]) + if delem[profile]["ipv4_addr"] != "": + intf["interfaces"][0]["nvPairs"]["PREFIX"] = str( + delem[profile]["ipv4_mask_len"] + ) else: - intf["interfaces"][0]["nvPairs"]["PREFIX"] = '' - intf["interfaces"][0]["nvPairs"]["ROUTING_TAG"] = delem[profile]['route_tag'] - intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]['mtu']) - intf["interfaces"][0]["nvPairs"]["SPEED"] = str(delem[profile]['speed']) + intf["interfaces"][0]["nvPairs"]["PREFIX"] = "" + intf["interfaces"][0]["nvPairs"]["ROUTING_TAG"] = delem[profile][ + "route_tag" + ] + intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]["mtu"]) + intf["interfaces"][0]["nvPairs"]["SPEED"] = str(delem[profile]["speed"]) intf["interfaces"][0]["nvPairs"]["INTF_NAME"] = ifname - if (delem[profile]['mode'] == 'monitor'): + if delem[profile]["mode"] == "monitor": intf["interfaces"][0]["nvPairs"]["INTF_NAME"] = ifname - if (delem[profile]['mode'] == 'epl_routed'): - intf["interfaces"][0]["nvPairs"]["IP"] = str(delem[profile]['ipv4_addr']) - intf["interfaces"][0]["nvPairs"]["PREFIX"] = str(delem[profile]['ipv4_mask_len']) - intf["interfaces"][0]["nvPairs"]["IPv6"] = str(delem[profile]['ipv6_addr']) - intf["interfaces"][0]["nvPairs"]["IPv6_PREFIX"] = str(delem[profile]['ipv6_mask_len']) - intf["interfaces"][0]["nvPairs"]["ROUTING_TAG"] = delem[profile]['route_tag'] - intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]['mtu']) - intf["interfaces"][0]["nvPairs"]["SPEED"] = str(delem[profile]['speed']) + if delem[profile]["mode"] == "epl_routed": + intf["interfaces"][0]["nvPairs"]["IP"] = 
str(delem[profile]["ipv4_addr"]) + intf["interfaces"][0]["nvPairs"]["PREFIX"] = str( + delem[profile]["ipv4_mask_len"] + ) + intf["interfaces"][0]["nvPairs"]["IPv6"] = str(delem[profile]["ipv6_addr"]) + intf["interfaces"][0]["nvPairs"]["IPv6_PREFIX"] = str( + delem[profile]["ipv6_mask_len"] + ) + intf["interfaces"][0]["nvPairs"]["ROUTING_TAG"] = delem[profile][ + "route_tag" + ] + intf["interfaces"][0]["nvPairs"]["MTU"] = str(delem[profile]["mtu"]) + intf["interfaces"][0]["nvPairs"]["SPEED"] = str(delem[profile]["speed"]) intf["interfaces"][0]["nvPairs"]["INTF_NAME"] = ifname - if (delem[profile]['mode'] != 'monitor'): - intf["interfaces"][0]["nvPairs"]["DESC"] = delem[profile]['description'] - if (delem[profile]['cmds'] is None): + if delem[profile]["mode"] != "monitor": + intf["interfaces"][0]["nvPairs"]["DESC"] = delem[profile]["description"] + if delem[profile]["cmds"] is None: intf["interfaces"][0]["nvPairs"]["CONF"] = "" else: - intf["interfaces"][0]["nvPairs"]["CONF"] = "\n".join(delem[profile]['cmds']) - intf["interfaces"][0]["nvPairs"]["ADMIN_STATE"] = str(delem[profile]['admin_state']).lower() + intf["interfaces"][0]["nvPairs"]["CONF"] = "\n".join( + delem[profile]["cmds"] + ) + intf["interfaces"][0]["nvPairs"]["ADMIN_STATE"] = str( + delem[profile]["admin_state"] + ).lower() # New Interfaces def dcnm_get_intf_payload(self, delem, sw): @@ -1806,12 +1945,10 @@ def dcnm_get_intf_payload(self, delem, sw): "interfaceType": "", "ifName": "", "fabricName": "", - "nvPairs": { - "SPEED": "Auto" - } + "nvPairs": {"SPEED": "Auto"}, } ], - "skipResourceCheck": str(False).lower() + "skipResourceCheck": str(False).lower(), } # Each interface type will have a different profile name. Set that based on the interface type and use that @@ -1820,71 +1957,71 @@ def dcnm_get_intf_payload(self, delem, sw): # Monitor ports are not put into diff_deploy, since they don't have any # commands to be executed on switch. 
This will affect the idempotence # check - if (delem['profile']['mode'] == 'monitor'): + if delem["profile"]["mode"] == "monitor": intf.update({"deploy": False}) else: - intf.update({"deploy": delem['deploy']}) + intf.update({"deploy": delem["deploy"]}) # Each type of interface and mode will have a different set of params. # First fill in the params common to all interface types and modes # intf.update ({"interfaceType" : self.int_types[delem['type']]}) - if ('vpc' == delem['type']): + if "vpc" == delem["type"]: intf["interfaces"][0].update({"serialNumber": str(self.vpc_ip_sn[sw])}) else: intf["interfaces"][0].update({"serialNumber": str(self.ip_sn[sw])}) - intf["interfaces"][0].update({"interfaceType": self.int_types[delem['type']]}) + intf["interfaces"][0].update({"interfaceType": self.int_types[delem["type"]]}) intf["interfaces"][0].update({"fabricName": self.fabric}) - if ('profile' not in delem.keys()): + if "profile" not in delem.keys(): # for state 'deleted', 'profile' construct is not included. So just update the ifName here # and return. 
Rest of the code is all 'profile' specific and hence not required for 'deleted' - ifname, port_id = self.dcnm_intf_get_if_name(delem['name'], delem['type']) + ifname, port_id = self.dcnm_intf_get_if_name(delem["name"], delem["type"]) intf["interfaces"][0].update({"ifName": ifname}) return intf - pol_ind_str = delem['type'] + '_' + delem['profile']['mode'] + pol_ind_str = delem["type"] + "_" + delem["profile"]["mode"] intf.update({"policy": self.pol_types[self.dcnm_version][pol_ind_str]}) - intf.update({"interfaceType": self.int_types[delem['type']]}) + intf.update({"interfaceType": self.int_types[delem["type"]]}) # Rest of the data in the dict depends on the interface type and the template - if ('pc' == delem['type']): - self.dcnm_intf_get_pc_payload(delem, intf, 'profile') - if ('sub_int' == delem['type']): - self.dcnm_intf_get_sub_intf_payload(delem, intf, 'profile') - if ('lo' == delem['type']): - self.dcnm_intf_get_loopback_payload(delem, intf, 'profile') - if ('vpc' == delem['type']): - self.dcnm_intf_get_vpc_payload(delem, intf, 'profile') - if ('eth' == delem['type']): - self.dcnm_intf_get_eth_payload(delem, intf, 'profile') + if "pc" == delem["type"]: + self.dcnm_intf_get_pc_payload(delem, intf, "profile") + if "sub_int" == delem["type"]: + self.dcnm_intf_get_sub_intf_payload(delem, intf, "profile") + if "lo" == delem["type"]: + self.dcnm_intf_get_loopback_payload(delem, intf, "profile") + if "vpc" == delem["type"]: + self.dcnm_intf_get_vpc_payload(delem, intf, "profile") + if "eth" == delem["type"]: + self.dcnm_intf_get_eth_payload(delem, intf, "profile") # Ethernet interface payload does not have interfaceType and skipResourceCheck flags. 
Pop # them out - intf.pop('skipResourceCheck') + intf.pop("skipResourceCheck") return intf def dcnm_intf_merge_intf_info(self, intf_info, if_head): - if (not if_head): + if not if_head: if_head.append(intf_info) return for item in if_head: - if (item['policy'] == intf_info['policy']): - item['interfaces'].append(intf_info['interfaces'][0]) + if item["policy"] == intf_info["policy"]: + item["interfaces"].append(intf_info["interfaces"][0]) return if_head.append(intf_info) def dcnm_intf_get_want(self): - if (None is self.config): + if None is self.config: return if not self.intf_info: @@ -1893,10 +2030,10 @@ def dcnm_intf_get_want(self): # self.intf_info is a list of directories each having config related to a particular interface for delem in self.intf_info: - if (any('profile' in key for key in delem)): - for sw in delem['switch']: + if any("profile" in key for key in delem): + for sw in delem["switch"]: intf_payload = self.dcnm_get_intf_payload(delem, sw) - if (intf_payload not in self.want): + if intf_payload not in self.want: self.want.append(intf_payload) def dcnm_intf_get_intf_info(self, ifName, serialNumber, ifType): @@ -1904,60 +2041,62 @@ def dcnm_intf_get_intf_info(self, ifName, serialNumber, ifType): # For VPC interfaces the serialNumber will be a combibed one. But GET on interface cannot # pass this combined serial number. 
We will have to pass individual ones - if (ifType == 'INTERFACE_VPC'): - sno = serialNumber.split('~')[0] + if ifType == "INTERFACE_VPC": + sno = serialNumber.split("~")[0] else: sno = serialNumber path = self.paths["IF_WITH_SNO_IFNAME"].format(sno, ifName) - resp = dcnm_send(self.module, 'GET', path) + resp = dcnm_send(self.module, "GET", path) - if ('DATA' in resp and resp['DATA']): - return resp['DATA'][0] + if "DATA" in resp and resp["DATA"]: + return resp["DATA"][0] else: return [] def dcnm_intf_get_intf_info_from_dcnm(self, intf): - return self.dcnm_intf_get_intf_info(intf['ifName'], intf['serialNumber'], intf['interfaceType']) + return self.dcnm_intf_get_intf_info( + intf["ifName"], intf["serialNumber"], intf["interfaceType"] + ) def dcnm_intf_get_have_all_with_sno(self, sno): - if '~' in sno: - sno = sno.split('~')[0] + if "~" in sno: + sno = sno.split("~")[0] path = self.paths["IF_DETAIL_WITH_SNO"].format(sno) - resp = dcnm_send(self.module, 'GET', path) + resp = dcnm_send(self.module, "GET", path) - if ('DATA' in resp and resp['DATA']): - self.have_all.extend(resp['DATA']) + if "DATA" in resp and resp["DATA"]: + self.have_all.extend(resp["DATA"]) def dcnm_intf_get_have_all(self, sw): # Check if you have already got the details for this switch - if (sw in self.have_all_list): + if sw in self.have_all_list: return # Check if the serial number is a combined one which will be the case for vPC interfaces. # If combined, then split it up and pass one of the serial numbers and not the combined one. - if ('~' in self.ip_sn[sw]): - sno = self.ip_sn[sw].split('~')[0] + if "~" in self.ip_sn[sw]: + sno = self.ip_sn[sw].split("~")[0] else: sno = self.ip_sn[sw] self.have_all_list.append(sw) - self.dcnm_intf_get_have_all_with_sno (sno) + self.dcnm_intf_get_have_all_with_sno(sno) def dcnm_intf_get_have(self): - if (not self.want): + if not self.want: return # We have all the requested interface config in self.want. 
Interfaces are grouped together based on the # policy string and the interface name in a single dict entry. for elem in self.want: - for intf in elem['interfaces']: + for intf in elem["interfaces"]: # For each interface present here, get the information that is already available # in DCNM. Based on this information, we will create the required payloads to be sent # to the DCNM controller based on the requested @@ -1965,7 +2104,7 @@ def dcnm_intf_get_have(self): # Fetch the information from DCNM w.r.t to the interafce that we have in self.want intf_payload = self.dcnm_intf_get_intf_info_from_dcnm(intf) - if (intf_payload): + if intf_payload: self.have.append(intf_payload) def dcnm_intf_compare_elements(self, name, sno, fabric, ie1, ie2, k, state): @@ -1978,76 +2117,96 @@ def dcnm_intf_compare_elements(self, name, sno, fabric, ie1, ie2, k, state): e1 = ie1 e2 = ie2 else: - if (isinstance(ie1, unicode)): # noqa - e1 = ie1.encode('utf-8') + if isinstance(ie1, unicode): # noqa pylint: disable=undefined-variable + e1 = ie1.encode("utf-8") else: e1 = ie1 - if (isinstance(ie2, unicode)): # noqa - e2 = ie2.encode('utf-8') + if isinstance(ie2, unicode): # noqa pylint: disable=undefined-variable + e2 = ie2.encode("utf-8") else: e2 = ie2 # The keys in key_translate represent a concatenated string. We should split # these strings and then compare the values - key_translate = ['MEMBER_INTERFACES', 'CONF', 'PEER1_MEMBER_INTERFACES', 'PEER2_MEMBER_INTERFACES', 'PEER1_PO_CONF', 'PEER2_PO_CONF'] + key_translate = [ + "MEMBER_INTERFACES", + "CONF", + "PEER1_MEMBER_INTERFACES", + "PEER2_MEMBER_INTERFACES", + "PEER1_PO_CONF", + "PEER2_PO_CONF", + ] # Some keys have values given as a list which is encoded into a # string. 
So split that up into list and then use 'set' to process # the same irrespective of the order of elements - if (k in key_translate): + if k in key_translate: # CONF, PEER1_PO_CONF and PEER2_PO_CONF has '\n' joining the commands # MEMBER_INTERFACES, PEER1_MEMBER_INTERFACES, and PEER2_MEMBER_INTERFACES # have ',' joining differnet elements. So use a multi-delimiter split # to split with any delim - t_e1 = set(re.split(r'[\n,]', e1.strip())) - t_e2 = set(re.split(r'[\n,]', e2.strip())) + t_e1 = set(re.split(r"[\n,]", e1.strip())) + t_e2 = set(re.split(r"[\n,]", e2.strip())) else: - if (isinstance(e1, str)): + if isinstance(e1, str): t_e1 = e1.lower() else: t_e1 = e1 - if (isinstance(e2, str)): + if isinstance(e2, str): t_e2 = e2.lower() else: t_e2 = e2 - if (t_e1 != t_e2): - if ((state == 'replaced') or (state == 'overridden')): - return 'add' - elif (state == 'merged'): + if t_e1 != t_e2: + if (state == "replaced") or (state == "overridden"): + return "add" + elif state == "merged": # If the key is included in config, then use the value from want. # If the key is not included in config, then use the value from # have. # Match and find the corresponding PB input. - match_pb = [pb for pb in self.pb_input if ((name.lower() == pb['ifname'].lower()) and - (sno == pb['sno']) and - (fabric == pb['fabric']))] + match_pb = [ + pb + for pb in self.pb_input + if ( + (name.lower() == pb["ifname"].lower()) + and (sno == pb["sno"]) + and (fabric == pb["fabric"]) + ) + ] pb_keys = list(match_pb[0].keys()) - if (self.keymap[k] not in pb_keys): + if self.keymap[k] not in pb_keys: # Copy the value from have, because for 'merged' state we # should leave values that are not specified in config as is. # We copy 'have' because, the validate input would have defaulted the # values for non-mandatory objects. 
- return 'copy_and_add' + return "copy_and_add" else: - return 'add' - return 'dont_add' + return "add" + return "dont_add" def dcnm_intf_can_be_added(self, want): - name = want['interfaces'][0]['ifName'] - sno = want['interfaces'][0]['serialNumber'] - fabric = want['interfaces'][0]['fabricName'] - - match_have = [have for have in self.have_all if ((name.lower() == have['ifName'].lower()) and - (sno == have['serialNo']) and - (fabric == have['fabricName']))] - if (match_have): - if ((match_have[0]['complianceStatus'] != 'In-Sync') and - (match_have[0]['complianceStatus'] != 'Pending')): + name = want["interfaces"][0]["ifName"] + sno = want["interfaces"][0]["serialNumber"] + fabric = want["interfaces"][0]["fabricName"] + + match_have = [ + have + for have in self.have_all + if ( + (name.lower() == have["ifName"].lower()) + and (sno == have["serialNo"]) + and (fabric == have["fabricName"]) + ) + ] + if match_have: + if (match_have[0]["complianceStatus"] != "In-Sync") and ( + match_have[0]["complianceStatus"] != "Pending" + ): return match_have[0], True else: return match_have[0], False @@ -2058,148 +2217,177 @@ def dcnm_intf_compare_want_and_have(self, state): for want in self.want: delem = {} - action = '' - name = want['interfaces'][0]['ifName'] - sno = want['interfaces'][0]['serialNumber'] - fabric = want['interfaces'][0]['fabricName'] - deploy = want['deploy'] + action = "" + name = want["interfaces"][0]["ifName"] + sno = want["interfaces"][0]["serialNumber"] + fabric = want["interfaces"][0]["fabricName"] + deploy = want["deploy"] intf_changed = False - want.pop('deploy') + want.pop("deploy") - match_have = [d for d in self.have if ((name.lower() == d['interfaces'][0]['ifName'].lower()) and - (sno == d['interfaces'][0]['serialNumber']))] + match_have = [ + d + for d in self.have + if ( + (name.lower() == d["interfaces"][0]["ifName"].lower()) + and (sno == d["interfaces"][0]["serialNumber"]) + ) + ] - if (not match_have): + if not match_have: changed_dict = 
copy.deepcopy(want) - if ((state == 'merged') or (state == 'replaced') or (state == 'overridden')): - action = 'add' + if ( + (state == "merged") + or (state == "replaced") + or (state == "overridden") + ): + action = "add" else: wkeys = list(want.keys()) - if ('skipResourceCheck' in wkeys): - wkeys.remove('skipResourceCheck') - if ('interfaceType' in wkeys): - wkeys.remove('interfaceType') + if "skipResourceCheck" in wkeys: + wkeys.remove("skipResourceCheck") + if "interfaceType" in wkeys: + wkeys.remove("interfaceType") for d in match_have: changed_dict = copy.deepcopy(want) - if ('skipResourceCheck' in changed_dict.keys()): - changed_dict.pop('skipResourceCheck') + if "skipResourceCheck" in changed_dict.keys(): + changed_dict.pop("skipResourceCheck") # First check if the policies are same for want and have. If they are different, we cannot compare # the profiles because each profile will have different elements. As per PRD, if policies are different # we should not merge the information. For now we will assume we will oerwrite the same. Don't compare # rest of the structure. Overwrite with waht ever is in want - if (want['policy'] != d['policy']): - action = 'update' + if want["policy"] != d["policy"]: + action = "update" continue - else: - for k in wkeys: - if (k == 'interfaces'): - if_keys = list(want[k][0].keys()) - if_keys.remove('interfaceType') - changed_dict[k][0].pop('interfaceType') - - # 'have' will not contain the fabric name object. So do not try to compare that. This - # is especially true for Ethernet interfaces. Since a switch can belong to only one fabric - # the serial number should be unique across all fabrics - if_keys.remove('fabricName') - changed_dict[k][0].pop('fabricName') - for ik in if_keys: - if (ik == 'nvPairs'): - nv_keys = list(want[k][0][ik].keys()) - nv_keys.remove("SPEED") - for nk in nv_keys: - # HAVE may have an entry with a list # of interfaces. Check all the - # interface entries for a match. 
Even if one entry matches do not - # add the interface - for index in range(len(d[k])): - res = self.dcnm_intf_compare_elements(name, sno, fabric, - want[k][0][ik][nk], - d[k][index][ik][nk], nk, state) - if (res == 'dont_add'): - break - if (res == 'copy_and_add'): - want[k][0][ik][nk] = d[k][0][ik][nk] - changed_dict[k][0][ik][nk] = d[k][0][ik][nk] - if (res != 'dont_add'): - action = 'update' - else: - # Keys and values match. Remove from changed_dict - changed_dict[k][0][ik].pop(nk) - else: + + for k in wkeys: + if k == "interfaces": + if_keys = list(want[k][0].keys()) + if_keys.remove("interfaceType") + changed_dict[k][0].pop("interfaceType") + + # 'have' will not contain the fabric name object. So do not try to compare that. This + # is especially true for Ethernet interfaces. Since a switch can belong to only one fabric + # the serial number should be unique across all fabrics + if_keys.remove("fabricName") + changed_dict[k][0].pop("fabricName") + for ik in if_keys: + if ik == "nvPairs": + nv_keys = list(want[k][0][ik].keys()) + nv_keys.remove("SPEED") + for nk in nv_keys: # HAVE may have an entry with a list # of interfaces. Check all the # interface entries for a match. Even if one entry matches do not # add the interface for index in range(len(d[k])): - res = self.dcnm_intf_compare_elements(name, sno, fabric, - want[k][0][ik], - d[k][0][ik], ik, state) - if (res == 'dont_add'): + res = self.dcnm_intf_compare_elements( + name, + sno, + fabric, + want[k][0][ik][nk], + d[k][index][ik][nk], + nk, + state, + ) + if res == "dont_add": break - if (res == 'copy_and_add'): - want[k][0][ik] = d[k][0][ik] - changed_dict[k][0][ik] = d[k][0][ik] - if (res != 'dont_add'): - action = 'update' + if res == "copy_and_add": + want[k][0][ik][nk] = d[k][0][ik][nk] + changed_dict[k][0][ik][nk] = d[k][0][ik][nk] + if res != "dont_add": + action = "update" else: # Keys and values match. 
Remove from changed_dict - if (ik != 'ifName'): - changed_dict[k][0].pop(ik) - else: - res = self.dcnm_intf_compare_elements(name, sno, fabric, - want[k], d[k], k, state) - - if (res == 'copy_and_add'): - want[k] = d[k] - changed_dict[k] = d[k] - if (res != 'dont_add'): - action = 'update' + changed_dict[k][0][ik].pop(nk) else: - # Keys and values match. Remove from changed_dict. - changed_dict.pop(k) + # HAVE may have an entry with a list # of interfaces. Check all the + # interface entries for a match. Even if one entry matches do not + # add the interface + for index in range(len(d[k])): + res = self.dcnm_intf_compare_elements( + name, + sno, + fabric, + want[k][0][ik], + d[k][0][ik], + ik, + state, + ) + if res == "dont_add": + break + if res == "copy_and_add": + want[k][0][ik] = d[k][0][ik] + changed_dict[k][0][ik] = d[k][0][ik] + if res != "dont_add": + action = "update" + else: + # Keys and values match. Remove from changed_dict + if ik != "ifName": + changed_dict[k][0].pop(ik) + else: + res = self.dcnm_intf_compare_elements( + name, sno, fabric, want[k], d[k], k, state + ) + + if res == "copy_and_add": + want[k] = d[k] + changed_dict[k] = d[k] + if res != "dont_add": + action = "update" + else: + # Keys and values match. Remove from changed_dict. + changed_dict.pop(k) - if (action == 'add'): + if action == "add": self.dcnm_intf_merge_intf_info(want, self.diff_create) # Add the changed_dict to self.changed_dict self.changed_dict[0][state].append(changed_dict) intf_changed = True - elif (action == 'update'): + elif action == "update": # Remove the 'interfaceType' key from 'want'. 
It is not required for 'replace' - if (want.get('interfaceType', None) is not None): - want.pop('interfaceType') + if want.get("interfaceType", None) is not None: + want.pop("interfaceType") self.dcnm_intf_merge_intf_info(want, self.diff_replace) # Add the changed_dict to self.changed_dict self.changed_dict[0][state].append(changed_dict) intf_changed = True # if deploy flag is set to True, add the information so that this interface will be deployed - if (str(deploy) == 'True'): + if str(deploy) == "True": # Add to diff_deploy, # 1. if intf_changed is True # 2. if intf_changed is Flase, then if 'complianceStatus is # False then add to diff_deploy. # 3. Do not add otherwise - if (False is intf_changed): + if False is intf_changed: match_intf, rc = self.dcnm_intf_can_be_added(want) else: match_intf = [] rc = True - if (True is rc): - delem['serialNumber'] = sno - delem['ifName'] = name - delem['fabricName'] = self.fabric + if True is rc: + delem["serialNumber"] = sno + delem["ifName"] = name + delem["fabricName"] = self.fabric self.diff_deploy.append(delem) - self.changed_dict[0]['deploy'].append(copy.deepcopy(delem)) + self.changed_dict[0]["deploy"].append(copy.deepcopy(delem)) if match_intf != []: - self.changed_dict[0]['debugs'].append({"Name": name, "SNO": sno, "DeployStatus": match_intf["complianceStatus"]}) + self.changed_dict[0]["debugs"].append( + { + "Name": name, + "SNO": sno, + "DeployStatus": match_intf["complianceStatus"], + } + ) def dcnm_intf_get_diff_replaced(self): @@ -2216,7 +2404,7 @@ def dcnm_intf_get_diff_replaced(self): # should be sent to DCNM for updation. 
The list can include information on interfaces which # are already presnt in self.have and which differ in the values for atleast one of the keys - self.dcnm_intf_compare_want_and_have('replaced') + self.dcnm_intf_compare_want_and_have("replaced") def dcnm_intf_get_diff_merge(self): @@ -2235,65 +2423,67 @@ def dcnm_intf_get_diff_merge(self): # NOTE: merge_diff will be updated only if there is some new information that is not already # existing. If existing information needs to be updated then use 'replace'. - self.dcnm_intf_compare_want_and_have('merged') + self.dcnm_intf_compare_want_and_have("merged") def dcnm_compare_default_payload(self, intf, have): - if(intf.get('policy') != have.get('policy')): - return 'DCNM_INTF_NOT_MATCH' - - intf_nv = intf.get('interfaces')[0].get('nvPairs') - have_nv = have.get('interfaces')[0].get('nvPairs') - - if(intf_nv.get('INTF_VRF') != have_nv.get('INTF_VRF')): - return 'DCNM_INTF_NOT_MATCH' - if(intf_nv.get('IP') != have_nv.get('IP')): - return 'DCNM_INTF_NOT_MATCH' - if(intf_nv.get('PREFIX') != have_nv.get('PREFIX')): - return 'DCNM_INTF_NOT_MATCH' - if(intf_nv.get('ROUTING_TAG') != have_nv.get('ROUTING_TAG')): - return 'DCNM_INTF_NOT_MATCH' - if(intf_nv.get('MTU') != have_nv.get('MTU')): - return 'DCNM_INTF_NOT_MATCH' - if(intf_nv.get('SPEED') != have_nv.get('SPEED')): - return 'DCNM_INTF_NOT_MATCH' - if(intf_nv.get('DESC') != have_nv.get('DESC')): - return 'DCNM_INTF_NOT_MATCH' - if(intf_nv.get('CONF') != have_nv.get('CONF')): - return 'DCNM_INTF_NOT_MATCH' - if(intf_nv.get('ADMIN_STATE') != have_nv.get('ADMIN_STATE')): - return 'DCNM_INTF_NOT_MATCH' - return 'DCNM_INTF_MATCH' + if intf.get("policy") != have.get("policy"): + return "DCNM_INTF_NOT_MATCH" + + intf_nv = intf.get("interfaces")[0].get("nvPairs") + have_nv = have.get("interfaces")[0].get("nvPairs") + + if intf_nv.get("INTF_VRF") != have_nv.get("INTF_VRF"): + return "DCNM_INTF_NOT_MATCH" + if intf_nv.get("IP") != have_nv.get("IP"): + return "DCNM_INTF_NOT_MATCH" + 
if intf_nv.get("PREFIX") != have_nv.get("PREFIX"): + return "DCNM_INTF_NOT_MATCH" + if intf_nv.get("ROUTING_TAG") != have_nv.get("ROUTING_TAG"): + return "DCNM_INTF_NOT_MATCH" + if intf_nv.get("MTU") != have_nv.get("MTU"): + return "DCNM_INTF_NOT_MATCH" + if intf_nv.get("SPEED") != have_nv.get("SPEED"): + return "DCNM_INTF_NOT_MATCH" + if intf_nv.get("DESC") != have_nv.get("DESC"): + return "DCNM_INTF_NOT_MATCH" + if intf_nv.get("CONF") != have_nv.get("CONF"): + return "DCNM_INTF_NOT_MATCH" + if intf_nv.get("ADMIN_STATE") != have_nv.get("ADMIN_STATE"): + return "DCNM_INTF_NOT_MATCH" + return "DCNM_INTF_MATCH" def dcnm_intf_get_default_eth_payload(self, ifname, sno, fabric): # default payload to be sent to DCNM for override case eth_payload = { "policy": self.pol_types[self.dcnm_version]["eth_routed"], - "interfaces": [{ - "interfaceType": "INTERFACE_ETHERNET", - "serialNumber": sno, - "ifName": "", - "fabricName": fabric, - "nvPairs": { + "interfaces": [ + { "interfaceType": "INTERFACE_ETHERNET", - "INTF_VRF": "", - "IP": "", - "PREFIX": "", - "ROUTING_TAG": "", - "MTU": "9216", - "SPEED": "Auto", - "DESC": "", - "CONF": "no shutdown", - "ADMIN_STATE": "true", - "INTF_NAME": ifname + "serialNumber": sno, + "ifName": "", + "fabricName": fabric, + "nvPairs": { + "interfaceType": "INTERFACE_ETHERNET", + "INTF_VRF": "", + "IP": "", + "PREFIX": "", + "ROUTING_TAG": "", + "MTU": "9216", + "SPEED": "Auto", + "DESC": "", + "CONF": "no shutdown", + "ADMIN_STATE": "true", + "INTF_NAME": ifname, + }, } - }] + ], } - eth_payload['interfaces'][0]["ifName"] = ifname - eth_payload['interfaces'][0]["serialNumber"] = sno - eth_payload['interfaces'][0]["fabricName"] = fabric + eth_payload["interfaces"][0]["ifName"] = ifname + eth_payload["interfaces"][0]["serialNumber"] = sno + eth_payload["interfaces"][0]["fabricName"] = fabric return eth_payload @@ -2302,38 +2492,51 @@ def dcnm_intf_can_be_replaced(self, have): for item in self.pb_input: # For overridden state, we will not touch 
anything that is present in incoming config, # because those interfaces will anyway be modified in the current run - if ((self.module.params['state'] == 'overridden') and - (item['ifname'] == have['ifName'])): - return False, item['ifname'] - if (item.get('members')): - if (have['ifName'] in - [self.dcnm_intf_get_if_name(mem, 'eth')[0] for mem in item['members']]): - return False, item['ifname'] - elif ((item.get('peer1_members')) or (item.get('peer2_members'))): - if ((have['ifName'] in - [self.dcnm_intf_get_if_name(mem, 'eth')[0] for mem in item['peer1_members']]) or - (have['ifName'] in - [self.dcnm_intf_get_if_name(mem, 'eth')[0] for mem in item['peer2_members']])): - return False, item['ifname'] + if (self.module.params["state"] == "overridden") and ( + item["ifname"] == have["ifName"] + ): + return False, item["ifname"] + if item.get("members"): + if have["ifName"] in [ + self.dcnm_intf_get_if_name(mem, "eth")[0] for mem in item["members"] + ]: + return False, item["ifname"] + elif (item.get("peer1_members")) or (item.get("peer2_members")): + if ( + have["ifName"] + in [ + self.dcnm_intf_get_if_name(mem, "eth")[0] + for mem in item["peer1_members"] + ] + ) or ( + have["ifName"] + in [ + self.dcnm_intf_get_if_name(mem, "eth")[0] + for mem in item["peer2_members"] + ] + ): + return False, item["ifname"] return True, None def dcnm_intf_process_config(self, cfg): processed = [] - if(None is cfg.get('switch', None)): + if None is cfg.get("switch", None): return - for sw in cfg['switch']: + for sw in cfg["switch"]: sno = self.ip_sn[sw] - if (sno not in processed): + if sno not in processed: processed.append(sno) # If the switch is part of VPC pair, then a GET on any serial number will fetch details of # both the switches. 
So check before adding to have_all - if not any(d.get('serialNo', None) == self.ip_sn[sw] for d in self.have_all): + if not any( + d.get("serialNo", None) == self.ip_sn[sw] for d in self.have_all + ): self.dcnm_intf_get_have_all(sw) def dcnm_intf_get_diff_overridden(self, cfg): @@ -2344,16 +2547,19 @@ def dcnm_intf_get_diff_overridden(self, cfg): self.diff_deploy = [] self.diff_replace = [] - if ((cfg is not None) and (cfg != [])): + if (cfg is not None) and (cfg != []): self.dcnm_intf_process_config(cfg) - elif ([] == cfg): + elif [] == cfg: for address in self.ip_sn.keys(): # the given switch may be part of a VPC pair. In that case we # need to get interface information using one switch which returns interfaces # from both the switches - if not any(d.get('serialNo', None) == self.ip_sn[address] for d in self.have_all): + if not any( + d.get("serialNo", None) == self.ip_sn[address] + for d in self.have_all + ): self.dcnm_intf_get_have_all(address) - elif (self.config): + elif self.config: # compute have_all for every switch for config in self.config: self.dcnm_intf_process_config(config) @@ -2364,14 +2570,16 @@ def dcnm_intf_get_diff_overridden(self, cfg): for have in self.have_all: delem = {} - name = have['ifName'] - sno = have['serialNo'] - fabric = have['fabricName'] + name = have["ifName"] + sno = have["serialNo"] + fabric = have["fabricName"] - if ((have['ifType'] == 'INTERFACE_ETHERNET') and - ((str(have['isPhysical']).lower() != 'none') and (str(have['isPhysical']).lower() == 'true'))): + if (have["ifType"] == "INTERFACE_ETHERNET") and ( + (str(have["isPhysical"]).lower() != "none") + and (str(have["isPhysical"]).lower() == "true") + ): - if (str(have['deletable']).lower() == 'false'): + if str(have["deletable"]).lower() == "false": # Add this 'have to a deferred list. 
We will process this list once we have processed all the 'haves' defer_list.append(have) continue @@ -2381,47 +2589,56 @@ def dcnm_intf_get_diff_overridden(self, cfg): # what is already present. If both are same, skip the interface. # So during idempotence, we may add the same interface again if we don't compare - intf = self.dcnm_intf_get_intf_info(have['ifName'], have['serialNo'], have['ifType']) - if (self.dcnm_compare_default_payload(uelem, intf) == 'DCNM_INTF_MATCH'): + intf = self.dcnm_intf_get_intf_info( + have["ifName"], have["serialNo"], have["ifType"] + ) + if self.dcnm_compare_default_payload(uelem, intf) == "DCNM_INTF_MATCH": continue - if (uelem is not None): + if uelem is not None: # Before defaulting ethernet interfaces, check if they are # member of any port-channel. If so, do not default that rc, intf = self.dcnm_intf_can_be_replaced(have) - if (rc is True): + if rc is True: self.dcnm_intf_merge_intf_info(uelem, self.diff_replace) - self.changed_dict[0]['replaced'].append(copy.deepcopy(uelem)) - delem['serialNumber'] = sno - delem['ifName'] = name - delem['fabricName'] = self.fabric + self.changed_dict[0]["replaced"].append(copy.deepcopy(uelem)) + delem["serialNumber"] = sno + delem["ifName"] = name + delem["fabricName"] = self.fabric self.diff_deploy.append(delem) - self.changed_dict[0]['deploy'].append(copy.deepcopy(delem)) + self.changed_dict[0]["deploy"].append(copy.deepcopy(delem)) # Sub-interafces are returned as INTERFACE_ETHERNET in have_all. So do an # additional check to see if it is physical. If not assume it to be sub-interface # for now. We will have to re-visit this check if there are additional non-physical # interfaces which have the same ETHERNET interafce type. 
For e.g., FEX ports - if ((have['ifType'] == 'INTERFACE_PORT_CHANNEL') or - (have['ifType'] == 'INTERFACE_LOOPBACK') or - (have['ifType'] == 'SUBINTERFACE') or - (have['ifType'] == 'INTERFACE_VPC') or - ((have['ifType'] == 'INTERFACE_ETHERNET') and - ((str(have['isPhysical']).lower() == 'none') or (str(have['isPhysical']).lower() == "false")))): + if ( + (have["ifType"] == "INTERFACE_PORT_CHANNEL") + or (have["ifType"] == "INTERFACE_LOOPBACK") + or (have["ifType"] == "SUBINTERFACE") + or (have["ifType"] == "INTERFACE_VPC") + or ( + (have["ifType"] == "INTERFACE_ETHERNET") + and ( + (str(have["isPhysical"]).lower() == "none") + or (str(have["isPhysical"]).lower() == "false") + ) + ) + ): # Certain interfaces cannot be deleted, so check before deleting. - if (str(have['deletable']).lower() == 'true'): + if str(have["deletable"]).lower() == "true": # Port-channel which are created as part of VPC peer link should not be deleted - if (have['ifType'] == 'INTERFACE_PORT_CHANNEL'): - if (have['alias'] == '"vpc-peer-link"'): + if have["ifType"] == "INTERFACE_PORT_CHANNEL": + if have["alias"] == '"vpc-peer-link"': continue # Interfaces sometimes take time to get deleted from DCNM. Such interfaces will have # underlayPolicies set to "None". Such interfaces need not be deleted again - if (have['underlayPolicies'] is None): + if have["underlayPolicies"] is None: continue # For interfaces that are matching, leave them alone. We will overwrite the config anyway @@ -2430,24 +2647,32 @@ def dcnm_intf_get_diff_overridden(self, cfg): # Check if this interface is present in want. 
If yes, ignore the interface, because all # configuration from want will be added to create anyway - match_want = [d for d in self.want if ((name.lower() == d['interfaces'][0]['ifName'].lower()) and - (sno == d['interfaces'][0]['serialNumber']) and - (fabric == d['interfaces'][0]['fabricName']))] + match_want = [ + d + for d in self.want + if ( + (name.lower() == d["interfaces"][0]["ifName"].lower()) + and (sno == d["interfaces"][0]["serialNumber"]) + and (fabric == d["interfaces"][0]["fabricName"]) + ) + ] - if (not match_want): + if not match_want: delem = {} delem["interfaceDbId"] = 0 - delem["interfaceType"] = have['ifType'] + delem["interfaceType"] = have["ifType"] delem["ifName"] = name delem["serialNumber"] = sno delem["fabricName"] = fabric - self.diff_delete[self.int_index[have['ifType']]].append(delem) + self.diff_delete[self.int_index[have["ifType"]]].append(delem) if have["mode"] is not None: - self.diff_delete_deploy[self.int_index[have['ifType']]].append(delem) - self.changed_dict[0]['deleted'].append(copy.deepcopy(delem)) + self.diff_delete_deploy[ + self.int_index[have["ifType"]] + ].append(delem) + self.changed_dict[0]["deleted"].append(copy.deepcopy(delem)) del_list.append(have) for intf in defer_list: @@ -2455,26 +2680,34 @@ def dcnm_intf_get_diff_overridden(self, cfg): # If so you can default/reset this ethernet interface also delem = {} - sno = intf['serialNo'] - fabric = intf['fabricName'] - name = intf['underlayPolicies'][0]['source'] - - match = [d for d in del_list if ((name.lower() == d['ifName'].lower()) and - (sno in d['serialNo']) and - (fabric == d['fabricName']))] - if (match): - - uelem = self.dcnm_intf_get_default_eth_payload(intf['ifName'], sno, fabric) + sno = intf["serialNo"] + fabric = intf["fabricName"] + name = intf["underlayPolicies"][0]["source"] + + match = [ + d + for d in del_list + if ( + (name.lower() == d["ifName"].lower()) + and (sno in d["serialNo"]) + and (fabric == d["fabricName"]) + ) + ] + if match: + + uelem = 
self.dcnm_intf_get_default_eth_payload( + intf["ifName"], sno, fabric + ) self.dcnm_intf_merge_intf_info(uelem, self.diff_replace) - self.changed_dict[0]['replaced'].append(copy.deepcopy(uelem)) - delem['serialNumber'] = sno - delem['ifName'] = intf['ifName'] - delem['fabricName'] = self.fabric + self.changed_dict[0]["replaced"].append(copy.deepcopy(uelem)) + delem["serialNumber"] = sno + delem["ifName"] = intf["ifName"] + delem["fabricName"] = self.fabric self.diff_deploy.append(delem) - self.changed_dict[0]['deploy'].append(copy.deepcopy(delem)) + self.changed_dict[0]["deploy"].append(copy.deepcopy(delem)) - self.dcnm_intf_compare_want_and_have('overridden') + self.dcnm_intf_compare_want_and_have("overridden") def dcnm_intf_get_diff_deleted(self): @@ -2484,7 +2717,7 @@ def dcnm_intf_get_diff_deleted(self): self.diff_deploy = [] self.diff_replace = [] - if ((None is self.config) or (self.config is [])): + if (None is self.config) or (self.config is []): # If no config is specified, then it means we need to delete or # reset all interfaces in the fabric. @@ -2495,26 +2728,29 @@ def dcnm_intf_get_diff_deleted(self): # the given switch may be part of a VPC pair. In that case we # need to get interface information using one switch which returns interfaces # from both the switches - if not any(d.get('serialNo', None) == self.ip_sn[address] for d in self.have_all): + if not any( + d.get("serialNo", None) == self.ip_sn[address] + for d in self.have_all + ): self.dcnm_intf_get_have_all(address) # Now that we have all the interface information we can run override # and delete or reset interfaces. 
self.dcnm_intf_get_diff_overridden(None) - elif (self.config): + elif self.config: for cfg in self.config: - if (cfg.get('name', None) is not None): + if cfg.get("name", None) is not None: processed = [] have_all = [] # If interface name alone is given, then delete or reset the # interface on all switches in the fabric - switches = cfg.get('switch', None) + switches = cfg.get("switch", None) - if (switches is None): + if switches is None: switches = self.ip_sn.keys() else: - switches = cfg['switch'] + switches = cfg["switch"] for sw in switches: intf = {} @@ -2523,100 +2759,129 @@ def dcnm_intf_get_diff_deleted(self): if_name, if_type = self.dcnm_extract_if_name(cfg) # Check if the interface is present in DCNM - intf['interfaceType'] = if_type - if (if_type == 'INTERFACE_VPC'): - intf['serialNumber'] = self.vpc_ip_sn[sw] + intf["interfaceType"] = if_type + if if_type == "INTERFACE_VPC": + intf["serialNumber"] = self.vpc_ip_sn[sw] else: - intf['serialNumber'] = self.ip_sn[sw] - intf['ifName'] = if_name + intf["serialNumber"] = self.ip_sn[sw] + intf["ifName"] = if_name - if (intf['serialNumber'] not in processed): - processed.append(intf['serialNumber']) + if intf["serialNumber"] not in processed: + processed.append(intf["serialNumber"]) else: continue # Ethernet interfaces cannot be deleted - if (if_type == 'INTERFACE_ETHERNET'): + if if_type == "INTERFACE_ETHERNET": - if (sw not in have_all): + if sw not in have_all: have_all.append(sw) self.dcnm_intf_get_have_all(sw) # Get the matching interface from have_all - match_have = [have for have in self.have_all if ((intf['ifName'].lower() == have['ifName'].lower()) and - (intf['serialNumber'] == have['serialNo']))][0] - if (match_have and (str(match_have['isPhysical']).lower() != 'none') and (str(match_have['isPhysical']).lower() == 'true')): - - if (str(match_have['deletable']).lower() == 'false'): + match_have = [ + have + for have in self.have_all + if ( + (intf["ifName"].lower() == have["ifName"].lower()) + and 
(intf["serialNumber"] == have["serialNo"]) + ) + ][0] + if ( + match_have + and (str(match_have["isPhysical"]).lower() != "none") + and (str(match_have["isPhysical"]).lower() == "true") + ): + + if str(match_have["deletable"]).lower() == "false": continue - uelem = self.dcnm_intf_get_default_eth_payload(intf['ifName'], intf['serialNumber'], self.fabric) - intf_payload = self.dcnm_intf_get_intf_info_from_dcnm(intf) + uelem = self.dcnm_intf_get_default_eth_payload( + intf["ifName"], intf["serialNumber"], self.fabric + ) + intf_payload = self.dcnm_intf_get_intf_info_from_dcnm( + intf + ) # Before we add the interface to replace list, check if the default payload is same as # what is already present. If both are same, skip the interface. This is required specifically # for ethernet interfaces because they don't actually get deleted. they will only be defaulted. # So during idempotence, we may add the same interface again if we don't compare - if (intf_payload != []): - if (self.dcnm_compare_default_payload(uelem, intf_payload) == 'DCNM_INTF_MATCH'): + if intf_payload != []: + if ( + self.dcnm_compare_default_payload( + uelem, intf_payload + ) + == "DCNM_INTF_MATCH" + ): continue - if (uelem is not None): + if uelem is not None: # Before defaulting ethernet interfaces, check if they are # member of any port-channel. 
If so, do not default that - rc, iface = self.dcnm_intf_can_be_replaced(match_have) - if (rc is True): - self.dcnm_intf_merge_intf_info(uelem, self.diff_replace) - self.changed_dict[0]['replaced'].append(copy.deepcopy(uelem)) - delem['serialNumber'] = intf['serialNumber'] - delem['ifName'] = if_name - delem['fabricName'] = self.fabric + rc, iface = self.dcnm_intf_can_be_replaced( + match_have + ) + if rc is True: + self.dcnm_intf_merge_intf_info( + uelem, self.diff_replace + ) + self.changed_dict[0]["replaced"].append( + copy.deepcopy(uelem) + ) + delem["serialNumber"] = intf["serialNumber"] + delem["ifName"] = if_name + delem["fabricName"] = self.fabric self.diff_deploy.append(delem) else: intf_payload = self.dcnm_intf_get_intf_info_from_dcnm(intf) - if (intf_payload != []): + if intf_payload != []: delem["interfaceDbId"] = 0 delem["interfaceType"] = if_type delem["ifName"] = if_name - delem["serialNumber"] = intf['serialNumber'] + delem["serialNumber"] = intf["serialNumber"] delem["fabricName"] = self.fabric self.diff_delete[self.int_index[if_type]].append(delem) if "monitor" not in intf_payload["policy"]: - self.diff_delete_deploy[self.int_index[if_type]].append(delem) - self.changed_dict[0]['deleted'].append(copy.deepcopy(delem)) + self.diff_delete_deploy[ + self.int_index[if_type] + ].append(delem) + self.changed_dict[0]["deleted"].append( + copy.deepcopy(delem) + ) else: self.dcnm_intf_get_diff_overridden(cfg) def dcnm_extract_if_name(self, cfg): - if (cfg['name'][0:2].lower() == 'po'): - if_name, port_id = self.dcnm_intf_get_if_name(cfg['name'], 'pc') - if_type = 'INTERFACE_PORT_CHANNEL' - elif (cfg['name'][0:2].lower() == 'lo'): - if_name, port_id = self.dcnm_intf_get_if_name(cfg['name'], 'lo') - if_type = 'INTERFACE_LOOPBACK' - elif (cfg['name'][0:3].lower() == 'eth'): - if ('.' 
not in cfg['name']): - if_name, port_id = self.dcnm_intf_get_if_name(cfg['name'], 'eth') - if_type = 'INTERFACE_ETHERNET' + if cfg["name"][0:2].lower() == "po": + if_name, port_id = self.dcnm_intf_get_if_name(cfg["name"], "pc") + if_type = "INTERFACE_PORT_CHANNEL" + elif cfg["name"][0:2].lower() == "lo": + if_name, port_id = self.dcnm_intf_get_if_name(cfg["name"], "lo") + if_type = "INTERFACE_LOOPBACK" + elif cfg["name"][0:3].lower() == "eth": + if "." not in cfg["name"]: + if_name, port_id = self.dcnm_intf_get_if_name(cfg["name"], "eth") + if_type = "INTERFACE_ETHERNET" else: - if_name, port_id = self.dcnm_intf_get_if_name(cfg['name'], 'sub_int') - if_type = 'SUBINTERFACE' - elif (cfg['name'][0:3].lower() == 'vpc'): - if_name, port_id = self.dcnm_intf_get_if_name(cfg['name'], 'vpc') - if_type = 'INTERFACE_VPC' + if_name, port_id = self.dcnm_intf_get_if_name(cfg["name"], "sub_int") + if_type = "SUBINTERFACE" + elif cfg["name"][0:3].lower() == "vpc": + if_name, port_id = self.dcnm_intf_get_if_name(cfg["name"], "vpc") + if_type = "INTERFACE_VPC" else: - if_name = '' - if_type = '' + if_name = "" + if_type = "" return if_name, if_type def dcnm_intf_get_diff_query(self): for info in self.intf_info: - sno = self.ip_sn[info['switch'][0]] - if (info['name'] == ''): + sno = self.ip_sn[info["switch"][0]] + if info["name"] == "": # GET all interfaces path = self.paths["IF_DETAIL_WITH_SNO"].format(sno) else: @@ -2624,12 +2889,12 @@ def dcnm_intf_get_diff_query(self): # GET a specific interface path = self.paths["IF_WITH_SNO_IFNAME"].format(sno, ifname) - resp = dcnm_send(self.module, 'GET', path) + resp = dcnm_send(self.module, "GET", path) - if ('DATA' in resp and resp['DATA']): - self.diff_query.extend(resp['DATA']) - self.changed_dict[0]['query'].extend(self.diff_query) - self.result['response'].extend(self.diff_query) + if "DATA" in resp and resp["DATA"]: + self.diff_query.extend(resp["DATA"]) + self.changed_dict[0]["query"].extend(self.diff_query) + 
self.result["response"].extend(self.diff_query) def dcnm_parse_response(self, resp): @@ -2640,7 +2905,7 @@ def dcnm_parse_response(self, resp): "MESSAGE": "OK", "METHOD": "POST", "REQUEST_PATH": "", - "RETURN_CODE": 200 + "RETURN_CODE": 200, } # Get a list of entities from the deploy. We will have to check @@ -2650,40 +2915,42 @@ def dcnm_parse_response(self, resp): ent_resp = {} for ent in entities: - ent_resp[ent] = 'No Error' - if (isinstance(resp['DATA'], list)): - for data in resp['DATA']: - host = data.get('entity') - if (host): - if (self.hn_sn.get(host) == ent): - ent_resp[ent] = data.get('message') + ent_resp[ent] = "No Error" + if isinstance(resp["DATA"], list): + for data in resp["DATA"]: + host = data.get("entity") + if host: + if self.hn_sn.get(host) == ent: + ent_resp[ent] = data.get("message") else: - ent_resp[ent] = 'No Error' - elif (isinstance(resp['DATA'], str)): - ent_resp[ent] = resp['DATA'] + ent_resp[ent] = "No Error" + elif isinstance(resp["DATA"], str): + ent_resp[ent] = resp["DATA"] for ent in entities: - if (ent_resp[ent] == "No Error"): + if ent_resp[ent] == "No Error": # Consider this case as success. - succ_resp['REQUEST_PATH'] = resp['REQUEST_PATH'] - succ_resp['MESSAGE'] = 'OK' - succ_resp['METHOD'] = resp['METHOD'] - succ_resp['RETURN_CODE'] = 200 + succ_resp["REQUEST_PATH"] = resp["REQUEST_PATH"] + succ_resp["MESSAGE"] = "OK" + succ_resp["METHOD"] = resp["METHOD"] + succ_resp["RETURN_CODE"] = 200 return succ_resp, True - elif (('No Commands to execute' in ent_resp[ent]) or - (ent_resp[ent] == 'Failed to fetch policies') or - (ent_resp[ent] == 'Failed to fetch switch configuration')): + elif ( + ("No Commands to execute" in ent_resp[ent]) + or (ent_resp[ent] == "Failed to fetch policies") + or (ent_resp[ent] == "Failed to fetch switch configuration") + ): # Consider this case as success. 
- succ_resp['REQUEST_PATH'] = resp['REQUEST_PATH'] - succ_resp['MESSAGE'] = 'OK' - succ_resp['METHOD'] = resp['METHOD'] - succ_resp['ORIG_MSG'] = ent_resp[ent] - succ_resp['RETURN_CODE'] = 200 + succ_resp["REQUEST_PATH"] = resp["REQUEST_PATH"] + succ_resp["MESSAGE"] = "OK" + succ_resp["METHOD"] = resp["METHOD"] + succ_resp["ORIG_MSG"] = ent_resp[ent] + succ_resp["RETURN_CODE"] = 200 else: failed = True break - if (failed): + if failed: return resp, False else: return succ_resp, False @@ -2691,7 +2958,7 @@ def dcnm_parse_response(self, resp): def dcnm_intf_send_message_handle_retry(self, action, path, payload, cmd): count = 1 - while (count < 20): + while count < 20: resp = dcnm_send(self.module, action, path, payload) @@ -2700,7 +2967,7 @@ def dcnm_intf_send_message_handle_retry(self, action, path, payload, cmd): # Consider that as success and mark the change flag as 'False; to indicate # nothinbg actually changed - if ((resp.get('MESSAGE') == 'OK') and (resp.get('RETURN_CODE') == 200)): + if (resp.get("MESSAGE") == "OK") and (resp.get("RETURN_CODE") == 200): return resp, True presp, changed = self.dcnm_parse_response(resp) @@ -2716,7 +2983,10 @@ def dcnm_intf_get_entities_list(self, deploy): sn_list = [] usno = [] - [[sn_list.append(v) for k, v in d.items() if k == 'serialNumber'] for d in deploy] + [ + [sn_list.append(v) for k, v in d.items() if k == "serialNumber"] + for d in deploy + ] # For vPC cases, serial numbers will be a combined one. But deploy responses from the DCNM # controller will be based on individual switches. 
So we will have to split up the serial @@ -2726,20 +2996,20 @@ def dcnm_intf_get_entities_list(self, deploy): vpc = False for num in ulist: - if ('~' in num): + if "~" in num: vpc = True - slist = num.split('~') + slist = num.split("~") usno.append(slist[0]) usno.append(slist[1]) - if (vpc is True): + if vpc is True: ulist = usno return ulist - def dcnm_intf_check_deployment_status (self, deploy_list): + def dcnm_intf_check_deployment_status(self, deploy_list): # Check for deployment status of all the configured objects only if the check_deploy flag is set. - if self.module.params['check_deploy'] is False: + if self.module.params["check_deploy"] is False: return path = self.paths["GLOBAL_IF_DEPLOY"] @@ -2748,28 +3018,47 @@ def dcnm_intf_check_deployment_status (self, deploy_list): retries = 0 while retries < 50: retries += 1 - name = item['ifName'] - sno = item['serialNumber'] - - match_have = [have for have in self.have_all if ((name.lower() == have['ifName'].lower()) and - (sno == have['serialNo']) and - (self.fabric == have['fabricName']))] + name = item["ifName"] + sno = item["serialNumber"] + + match_have = [ + have + for have in self.have_all + if ( + (name.lower() == have["ifName"].lower()) + and (sno == have["serialNo"]) + and (self.fabric == have["fabricName"]) + ) + ] if match_have: - if (match_have[0]['complianceStatus'] == 'In-Sync'): + if match_have[0]["complianceStatus"] == "In-Sync": break - else: - if retries == 10 or retries == 20: - json_payload = json.dumps({"ifName": name, "serialNumber": sno, "fabricName": self.fabric}) - dcnm_send(self.module, 'POST', path, json_payload) - time.sleep (20) - self.have_all = [] - self.dcnm_intf_get_have_all_with_sno(sno) + + if retries == 10 or retries == 20: + json_payload = json.dumps( + { + "ifName": name, + "serialNumber": sno, + "fabricName": self.fabric, + } + ) + dcnm_send(self.module, "POST", path, json_payload) + time.sleep(20) + self.have_all = [] + self.dcnm_intf_get_have_all_with_sno(sno) else: # For 
merge state, the interfaces would have been created just now. Fetch them again before checking self.have_all = [] self.dcnm_intf_get_have_all_with_sno(sno) - if (match_have == [] or match_have[0]['complianceStatus'] != 'In-Sync'): - self.module.fail_json (msg={"FAILURE REASON": "Interafce "+name+" did not reach 'In-Sync' State", "Compliance Status" : match_have[0]["complianceStatus"]}) + if match_have == [] or match_have[0]["complianceStatus"] != "In-Sync": + self.module.fail_json( + msg={ + "FAILURE REASON": "Interafce " + + name + + " did not reach 'In-Sync' State", + "Compliance Status": match_have[0]["complianceStatus"], + } + ) def dcnm_intf_send_message_to_dcnm(self): @@ -2788,41 +3077,44 @@ def dcnm_intf_send_message_to_dcnm(self): for delem in self.diff_delete: - if (delem == []): + if delem == []: continue json_payload = json.dumps(delem) - resp = dcnm_send(self.module, 'DELETE', path, json_payload) + resp = dcnm_send(self.module, "DELETE", path, json_payload) if resp.get("RETURN_CODE") != 200: deploy_failed = False for item in resp["DATA"]: - if 'No Commands to execute' not in item["message"]: + if "No Commands to execute" not in item["message"]: deploy_failed = True if deploy_failed is False: resp["RETURN_CODE"] = 200 resp["MESSAGE"] = "OK" - if ((resp.get('MESSAGE') != 'OK') or (resp.get('RETURN_CODE') != 200)): + if (resp.get("MESSAGE") != "OK") or (resp.get("RETURN_CODE") != 200): # there may be cases which are not actual failures. retry the # action - resp, rc = self.dcnm_intf_send_message_handle_retry('DELETE', path, - json_payload, 'DELETE') + resp, rc = self.dcnm_intf_send_message_handle_retry( + "DELETE", path, json_payload, "DELETE" + ) # Even if one of the elements succeed, changed must be set to # True. 
Once changed becomes True, then it remains True - if (False is changed): + if False is changed: changed = rc - if (((resp.get('MESSAGE') != 'OK') and ('No Commands to execute' not in resp.get('MESSAGE'))) or - (resp.get('RETURN_CODE') != 200)): + if ( + (resp.get("MESSAGE") != "OK") + and ("No Commands to execute" not in resp.get("MESSAGE")) + ) or (resp.get("RETURN_CODE") != 200): self.module.fail_json(msg=resp) else: changed = True delete = changed - self.result['response'].append(resp) + self.result["response"].append(resp) resp = None @@ -2834,28 +3126,38 @@ def dcnm_intf_send_message_to_dcnm(self): for delem in self.diff_delete_deploy: index = index + 1 - if (delem == []): + if delem == []: continue - if index != self.int_index ["INTERFACE_VPC"]: + if index != self.int_index["INTERFACE_VPC"]: # Deploy just requires ifName and serialNumber - [[item.pop('interfaceType'), item.pop('fabricName'), item.pop('interfaceDbId')] for item in delem] + [ + [ + item.pop("interfaceType"), + item.pop("fabricName"), + item.pop("interfaceDbId"), + ] + for item in delem + ] else: - [[item.pop('interfaceType'), item.pop('interfaceDbId')] for item in delem] + [ + [item.pop("interfaceType"), item.pop("interfaceDbId")] + for item in delem + ] json_payload = json.dumps(delem) - resp = dcnm_send(self.module, 'POST', path, json_payload) + resp = dcnm_send(self.module, "POST", path, json_payload) if resp.get("RETURN_CODE") != 200: deploy_failed = False for item in resp["DATA"]: - if 'No Commands to execute' not in item["message"]: + if "No Commands to execute" not in item["message"]: deploy_failed = True if deploy_failed is False: resp["RETURN_CODE"] = 200 resp["MESSAGE"] = "OK" - self.result['response'].append(resp) + self.result["response"].append(resp) resp = None @@ -2863,10 +3165,10 @@ def dcnm_intf_send_message_to_dcnm(self): for payload in self.diff_replace: json_payload = json.dumps(payload) - resp = dcnm_send(self.module, 'PUT', path, json_payload) - 
self.result['response'].append(resp) + resp = dcnm_send(self.module, "PUT", path, json_payload) + self.result["response"].append(resp) - if ((resp.get('MESSAGE') != 'OK') or (resp.get('RETURN_CODE') != 200)): + if (resp.get("MESSAGE") != "OK") or (resp.get("RETURN_CODE") != 200): self.module.fail_json(msg=resp) else: replace = True @@ -2877,10 +3179,10 @@ def dcnm_intf_send_message_to_dcnm(self): for payload in self.diff_create: json_payload = json.dumps(payload) - resp = dcnm_send(self.module, 'POST', path, json_payload) - self.result['response'].append(resp) + resp = dcnm_send(self.module, "POST", path, json_payload) + self.result["response"].append(resp) - if ((resp.get('MESSAGE') != 'OK') or (resp.get('RETURN_CODE') != 200)): + if (resp.get("MESSAGE") != "OK") or (resp.get("RETURN_CODE") != 200): self.module.fail_json(msg=resp) else: create = True @@ -2888,13 +3190,13 @@ def dcnm_intf_send_message_to_dcnm(self): resp = None path = self.paths["GLOBAL_IF_DEPLOY"] - if (self.diff_deploy): + if self.diff_deploy: json_payload = json.dumps(self.diff_deploy) - resp = dcnm_send(self.module, 'POST', path, json_payload) + resp = dcnm_send(self.module, "POST", path, json_payload) - if ((resp.get('MESSAGE') != 'OK') and (resp.get('RETURN_CODE') != 200)): + if (resp.get("MESSAGE") != "OK") and (resp.get("RETURN_CODE") != 200): resp, rc = self.dcnm_parse_response(resp) changed = rc else: @@ -2902,126 +3204,144 @@ def dcnm_intf_send_message_to_dcnm(self): deploy = changed - self.result['response'].append(resp) + self.result["response"].append(resp) resp = None - if (self.diff_deploy): + if self.diff_deploy: # Do a second deploy. Sometimes even if interfaces are created, they are # not being deployed. A second deploy solves the same. 
Don't worry about # the return values - resp = dcnm_send(self.module, 'POST', path, json_payload) + resp = dcnm_send(self.module, "POST", path, json_payload) resp = None - if self.diff_deploy: - self.dcnm_intf_check_deployment_status (self.diff_deploy) + if self.diff_deploy: + self.dcnm_intf_check_deployment_status(self.diff_deploy) # In overridden and deleted states, if no delete or create is happening and we have # only replace, then check the return message for deploy. If it says # "No Commands to execute", then the interfaces we are replacing are # already in the required state and so consider that a no change - if ((self.module.params['state'] == 'overridden') or - (self.module.params['state'] == 'deleted')): - self.result['changed'] = (delete or create or deploy) + if (self.module.params["state"] == "overridden") or ( + self.module.params["state"] == "deleted" + ): + self.result["changed"] = delete or create or deploy else: - if (delete or create or replace or deploy): - self.result['changed'] = True + if delete or create or replace or deploy: + self.result["changed"] = True else: - self.result['changed'] = False + self.result["changed"] = False def dcnm_translate_switch_info(self, config, ip_sn, hn_sn): - if (None is config): + if None is config: return for cfg in config: index = 0 - if (None is cfg.get('switch', None)): + if None is cfg.get("switch", None): continue - for sw_elem in cfg['switch']: + for sw_elem in cfg["switch"]: addr_info = dcnm_get_ip_addr_info(self.module, sw_elem, ip_sn, hn_sn) - cfg['switch'][index] = addr_info + cfg["switch"][index] = addr_info index = index + 1 # Check if the VPC serial number information is already present. If not fetch that - if (self.vpc_ip_sn.get(addr_info, None) is None): + if self.vpc_ip_sn.get(addr_info, None) is None: sno = self.dcnm_intf_get_vpc_serial_number(addr_info) - if ('~' in sno): + if "~" in sno: # This switch is part of VPC pair. 
Populate the VPC serial number DB self.vpc_ip_sn[addr_info] = sno def main(): - """ main entry point for module execution - """ + """main entry point for module execution""" element_spec = dict( - fabric=dict(required=True, type='str'), - config=dict(required=False, type='list', elements='dict'), - state=dict(type='str', default='merged', - choices=['merged', 'replaced', 'overridden', 'deleted', - 'query']), - check_deploy=dict(type='bool', default=False) + fabric=dict(required=True, type="str"), + config=dict(required=False, type="list", elements="dict"), + state=dict( + type="str", + default="merged", + choices=["merged", "replaced", "overridden", "deleted", "query"], + ), + check_deploy=dict(type="bool", default=False), ) - module = AnsibleModule(argument_spec=element_spec, - supports_check_mode=True) + module = AnsibleModule(argument_spec=element_spec, supports_check_mode=True) dcnm_intf = DcnmIntf(module) if not dcnm_intf.ip_sn: - dcnm_intf.result['msg'] = "Fabric {} missing on DCNM or does not have any switches".format(dcnm_intf.fabric) - module.fail_json(msg="Fabric {} missing on DCNM or does not have any switches".format(dcnm_intf.fabric)) + dcnm_intf.result[ + "msg" + ] = "Fabric {0} missing on DCNM or does not have any switches".format( + dcnm_intf.fabric + ) + module.fail_json( + msg="Fabric {0} missing on DCNM or does not have any switches".format( + dcnm_intf.fabric + ) + ) - state = module.params['state'] + state = module.params["state"] if not dcnm_intf.config: - if state == 'merged' or state == 'replaced' or state == 'query': - module.fail_json(msg="'config' element is mandatory for state '{}', given = '{}'".format(state, dcnm_intf.config)) - - dcnm_intf.dcnm_translate_switch_info(dcnm_intf.config, dcnm_intf.ip_sn, - dcnm_intf.hn_sn) + if state == "merged" or state == "replaced" or state == "query": + module.fail_json( + msg="'config' element is mandatory for state '{0}', given = '{1}'".format( + state, dcnm_intf.config + ) + ) + + 
dcnm_intf.dcnm_translate_switch_info( + dcnm_intf.config, dcnm_intf.ip_sn, dcnm_intf.hn_sn + ) dcnm_intf.dcnm_intf_copy_config() dcnm_intf.dcnm_intf_validate_input() # state 'deleted' may not include all the information - if ((module.params['state'] != 'query') and (module.params['state'] != 'deleted')): + if (module.params["state"] != "query") and (module.params["state"] != "deleted"): dcnm_intf.dcnm_intf_get_want() dcnm_intf.dcnm_intf_get_have() - if (module.params['state'] == 'merged'): + if module.params["state"] == "merged": dcnm_intf.dcnm_intf_get_diff_merge() - if (module.params['state'] == 'replaced'): + if module.params["state"] == "replaced": dcnm_intf.dcnm_intf_get_diff_replaced() - if (module.params['state'] == 'overridden'): - if (dcnm_intf.config is None): + if module.params["state"] == "overridden": + if dcnm_intf.config is None: dcnm_intf.dcnm_intf_get_diff_overridden([]) else: dcnm_intf.dcnm_intf_get_diff_overridden(None) - if (module.params['state'] == 'deleted'): + if module.params["state"] == "deleted": dcnm_intf.dcnm_intf_get_diff_deleted() - if (module.params['state'] == 'query'): + if module.params["state"] == "query": dcnm_intf.dcnm_intf_get_diff_query() - dcnm_intf.result['diff'] = dcnm_intf.changed_dict - - if (dcnm_intf.diff_create or dcnm_intf.diff_replace or dcnm_intf.diff_deploy or - dcnm_intf.diff_delete[dcnm_intf.int_index['INTERFACE_PORT_CHANNEL']] or - dcnm_intf.diff_delete[dcnm_intf.int_index['INTERFACE_VPC']] or - dcnm_intf.diff_delete[dcnm_intf.int_index['INTERFACE_ETHERNET']] or - dcnm_intf.diff_delete[dcnm_intf.int_index['SUBINTERFACE']] or - dcnm_intf.diff_delete[dcnm_intf.int_index['INTERFACE_LOOPBACK']]): - dcnm_intf.result['changed'] = True + dcnm_intf.result["diff"] = dcnm_intf.changed_dict + + if ( + dcnm_intf.diff_create + or dcnm_intf.diff_replace + or dcnm_intf.diff_deploy + or dcnm_intf.diff_delete[dcnm_intf.int_index["INTERFACE_PORT_CHANNEL"]] + or dcnm_intf.diff_delete[dcnm_intf.int_index["INTERFACE_VPC"]] + or 
dcnm_intf.diff_delete[dcnm_intf.int_index["INTERFACE_ETHERNET"]] + or dcnm_intf.diff_delete[dcnm_intf.int_index["SUBINTERFACE"]] + or dcnm_intf.diff_delete[dcnm_intf.int_index["INTERFACE_LOOPBACK"]] + ): + dcnm_intf.result["changed"] = True else: module.exit_json(**dcnm_intf.result) @@ -3033,5 +3353,5 @@ def main(): module.exit_json(**dcnm_intf.result) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/plugins/modules/dcnm_inventory.py b/plugins/modules/dcnm_inventory.py index 2033fd250..7225be3be 100644 --- a/plugins/modules/dcnm_inventory.py +++ b/plugins/modules/dcnm_inventory.py @@ -1,6 +1,6 @@ #!/usr/bin/python # -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import absolute_import, division, print_function +__metaclass__ = type __author__ = "Karthik Babu Harichandra Babu" -DOCUMENTATION = ''' +DOCUMENTATION = """ --- module: dcnm_inventory short_description: Add and remove Switches from a DCNM managed VXLAN fabric. 
@@ -86,9 +88,9 @@ - Set this to false for greenfield deployment and true for brownfield deployment type: str required: true -''' +""" -EXAMPLES = ''' +EXAMPLES = """ # This module supports the following states: # # Merged: @@ -196,7 +198,7 @@ cisco.dcnm.dcnm_inventory: fabric: vxlan-fabric state: query # merged / deleted / overridden / query -''' +""" import time import copy @@ -204,19 +206,22 @@ import re from ansible.module_utils.six.moves.urllib.parse import urlencode from ansible.module_utils.basic import AnsibleModule -from ansible_collections.cisco.dcnm.plugins.module_utils.network.dcnm.dcnm import \ - dcnm_send, validate_list_of_dicts, dcnm_get_ip_addr_info, dcnm_version_supported, \ - get_fabric_details +from ansible_collections.cisco.dcnm.plugins.module_utils.network.dcnm.dcnm import ( + dcnm_send, + validate_list_of_dicts, + dcnm_get_ip_addr_info, + dcnm_version_supported, + get_fabric_details, +) class DcnmInventory: - def __init__(self, module): self.switches = {} self.module = module self.params = module.params - self.fabric = module.params['fabric'] - self.config = module.params.get('config') + self.fabric = module.params["fabric"] + self.config = module.params.get("config") self.check_mode = False self.validated = [] self.have_create = [] @@ -226,13 +231,9 @@ def __init__(self, module): self.diff_delete = {} self.query = [] self.node_migration = False - self.nd_prefix = '/appcenter/cisco/ndfc/api/v1/lan-fabric' + self.nd_prefix = "/appcenter/cisco/ndfc/api/v1/lan-fabric" - self.result = dict( - changed=False, - diff=[], - response=[] - ) + self.result = dict(changed=False, diff=[], response=[]) self.controller_version = dcnm_version_supported(self.module) self.fabric_details = get_fabric_details(self.module, self.fabric) @@ -242,52 +243,51 @@ def __init__(self, module): def update_discover_params(self, inv): # with the inv parameters perform the test-reachability (discover) - method = 'POST' - path = 
'/rest/control/fabrics/{}/inventory/test-reachability'.format(self.fabric) + method = "POST" + path = "/rest/control/fabrics/{0}/inventory/test-reachability".format( + self.fabric + ) if self.nd: path = self.nd_prefix + path response = dcnm_send(self.module, method, path, json.dumps(inv)) - self.result['response'].append(response) - fail, self.result['changed'] = self.handle_response(response, "create") + self.result["response"].append(response) + fail, self.result["changed"] = self.handle_response(response, "create") if fail: self.module.fail_json(msg=response) - if ('DATA' in response): - return response['DATA'] + if "DATA" in response: + return response["DATA"] else: return 0 def update_create_params(self, inv): - s_ip = 'None' - if inv['seed_ip']: - s_ip = dcnm_get_ip_addr_info(self.module, inv['seed_ip'], None, None) + s_ip = "None" + if inv["seed_ip"]: + s_ip = dcnm_get_ip_addr_info(self.module, inv["seed_ip"], None, None) - state = self.params['state'] + state = self.params["state"] - if state == 'deleted': - inv_upd = { - "seedIP": s_ip, - } - elif state == 'query': + if state == "deleted": inv_upd = { "seedIP": s_ip, - "role": inv['role'].replace(" ", "_") } + elif state == "query": + inv_upd = {"seedIP": s_ip, "role": inv["role"].replace(" ", "_")} else: - if inv['auth_proto'] == 'MD5': + if inv["auth_proto"] == "MD5": pro = 0 - elif inv['auth_proto'] == 'SHA': + elif inv["auth_proto"] == "SHA": pro = 1 - elif inv['auth_proto'] == 'MD5_DES': + elif inv["auth_proto"] == "MD5_DES": pro = 2 - elif inv['auth_proto'] == 'MD5_AES': + elif inv["auth_proto"] == "MD5_AES": pro = 3 - elif inv['auth_proto'] == 'SHA_DES': + elif inv["auth_proto"] == "SHA_DES": pro = 4 - elif inv['auth_proto'] == 'SHA_AES': + elif inv["auth_proto"] == "SHA_AES": pro = 5 else: pro = 0 @@ -295,15 +295,15 @@ def update_create_params(self, inv): inv_upd = { "seedIP": s_ip, "snmpV3AuthProtocol": pro, - "username": inv['user_name'], - "password": inv['password'], - "maxHops": 
inv['max_hops'], + "username": inv["user_name"], + "password": inv["password"], + "maxHops": inv["max_hops"], "cdpSecondTimeout": "5", - "role": inv['role'].replace(" ", "_"), - "preserveConfig": inv['preserve_config'] + "role": inv["role"].replace(" ", "_"), + "preserveConfig": inv["preserve_config"], } - resp = (self.update_discover_params(inv_upd)) + resp = self.update_discover_params(inv_upd) inv_upd["switches"] = resp @@ -311,39 +311,46 @@ def update_create_params(self, inv): def get_have(self): - method = 'GET' - path = '/rest/control/fabrics/{}/inventory'.format(self.fabric) + method = "GET" + path = "/rest/control/fabrics/{0}/inventory".format(self.fabric) if self.nd: path = self.nd_prefix + path inv_objects = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(inv_objects, 'query_dcnm') + missing_fabric, not_ok = self.handle_response(inv_objects, "query_dcnm") - if inv_objects.get('ERROR') == 'Not Found' and inv_objects.get('RETURN_CODE') == 404: - self.module.fail_json(msg="Fabric {} not present on DCNM".format(self.fabric)) + if ( + inv_objects.get("ERROR") == "Not Found" + and inv_objects.get("RETURN_CODE") == 404 + ): + self.module.fail_json( + msg="Fabric {0} not present on DCNM".format(self.fabric) + ) return if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Unable to find inventories under fabric: {}".format(self.fabric) + msg1 = "Fabric {0} not present on DCNM".format(self.fabric) + msg2 = "Unable to find inventories under fabric: {0}".format(self.fabric) self.module.fail_json(msg=msg1 if missing_fabric else msg2) - if not inv_objects['DATA']: + if not inv_objects["DATA"]: return have_switch = [] - for inv in inv_objects['DATA']: + for inv in inv_objects["DATA"]: get_switch = {} - get_switch.update({'sysName': inv['logicalName']}) - get_switch.update({'serialNumber': inv['serialNumber']}) - get_switch.update({'ipaddr': inv['ipAddress']}) - 
get_switch.update({'platform': inv['nonMdsModel']}) - get_switch.update({'version': inv['release']}) - get_switch.update({'deviceIndex': inv['logicalName'] + '(' + inv['serialNumber'] + ')'}) - get_switch.update({'role': inv['switchRole'].replace(" ", "_")}) - get_switch.update({'mode': inv['mode']}) - get_switch.update({'serialNumber': inv['serialNumber']}) + get_switch.update({"sysName": inv["logicalName"]}) + get_switch.update({"serialNumber": inv["serialNumber"]}) + get_switch.update({"ipaddr": inv["ipAddress"]}) + get_switch.update({"platform": inv["nonMdsModel"]}) + get_switch.update({"version": inv["release"]}) + get_switch.update( + {"deviceIndex": inv["logicalName"] + "(" + inv["serialNumber"] + ")"} + ) + get_switch.update({"role": inv["switchRole"].replace(" ", "_")}) + get_switch.update({"mode": inv["mode"]}) + get_switch.update({"serialNumber": inv["serialNumber"]}) switchdict = {} switchlst = [] switchlst.append(get_switch) @@ -392,16 +399,22 @@ def get_diff_replace_delete(self): def have_in_want(have_c): match_found = False for want_c in self.want_create: - match = re.search(r'\S+\((\S+)\)', want_c["switches"][0]['deviceIndex']) + match = re.search(r"\S+\((\S+)\)", want_c["switches"][0]["deviceIndex"]) if match is None: continue want_serial_num = match.groups()[0] - if have_c["switches"][0]['serialNumber'] == want_serial_num: - if have_c["switches"][0]['ipaddr'] == want_c["switches"][0]['ipaddr'] and \ - have_c["switches"][0]['platform'] == want_c["switches"][0]['platform'] and \ - have_c["switches"][0]['version'] == want_c["switches"][0]['version'] and \ - have_c["switches"][0]['sysName'] == want_c["switches"][0]['sysName'] and \ - have_c["switches"][0]['role'] == want_c['role']: + if have_c["switches"][0]["serialNumber"] == want_serial_num: + if ( + have_c["switches"][0]["ipaddr"] + == want_c["switches"][0]["ipaddr"] + and have_c["switches"][0]["platform"] + == want_c["switches"][0]["platform"] + and have_c["switches"][0]["version"] + == 
want_c["switches"][0]["version"] + and have_c["switches"][0]["sysName"] + == want_c["switches"][0]["sysName"] + and have_c["switches"][0]["role"] == want_c["role"] + ): match_found = True return match_found @@ -409,8 +422,8 @@ def have_in_want(have_c): for have_c in self.have_create: if have_in_want(have_c): continue - else: - diff_delete.append(have_c["switches"][0]['serialNumber']) + + diff_delete.append(have_c["switches"][0]["serialNumber"]) self.diff_delete = diff_delete @@ -421,13 +434,13 @@ def get_diff_delete(self): if self.config: for want_c in self.want_create: for have_c in self.have_create: - if (have_c["switches"][0]['ipaddr'] == want_c['seedIP']): - diff_delete.append(have_c["switches"][0]['serialNumber']) + if have_c["switches"][0]["ipaddr"] == want_c["seedIP"]: + diff_delete.append(have_c["switches"][0]["serialNumber"]) continue else: for have_c in self.have_create: - diff_delete.append(have_c["switches"][0]['serialNumber']) + diff_delete.append(have_c["switches"][0]["serialNumber"]) self.diff_delete = diff_delete @@ -438,20 +451,25 @@ def get_diff_merge(self): for want_c in self.want_create: found = False for have_c in self.have_create: - match = re.search(r'\S+\((\S+)\)', want_c["switches"][0]['deviceIndex']) + match = re.search(r"\S+\((\S+)\)", want_c["switches"][0]["deviceIndex"]) if match is None: continue serial_num = match.groups()[0] - if want_c["switches"][0]['ipaddr'] == have_c["switches"][0]['ipaddr'] and \ - serial_num == have_c["switches"][0]['serialNumber'] \ - and want_c["switches"][0]['platform'] == have_c["switches"][0]['platform'] and \ - want_c["switches"][0]['version'] == have_c["switches"][0]['version'] \ - and want_c["switches"][0]['sysName'] == have_c["switches"][0]['sysName'] \ - and want_c['role'] == have_c["switches"][0]['role']: + if ( + want_c["switches"][0]["ipaddr"] == have_c["switches"][0]["ipaddr"] + and serial_num == have_c["switches"][0]["serialNumber"] + and want_c["switches"][0]["platform"] + == 
have_c["switches"][0]["platform"] + and want_c["switches"][0]["version"] + == have_c["switches"][0]["version"] + and want_c["switches"][0]["sysName"] + == have_c["switches"][0]["sysName"] + and want_c["role"] == have_c["switches"][0]["role"] + ): found = True - if have_c["switches"][0]['mode'] == "Migration": + if have_c["switches"][0]["mode"] == "Migration": # Switch is already discovered using DCNM GUI # Perform assign-role/config-save/config-deploy self.node_migration = True @@ -481,135 +499,182 @@ def get_diff_merge(self): def validate_input(self): """Parse the playbook values, validate to param specs.""" - state = self.params['state'] + state = self.params["state"] - if state == 'merged' or state == 'overridden': + if state == "merged" or state == "overridden": inv_spec = dict( - seed_ip=dict(required=True, type='str'), - auth_proto=dict(type='str', - choices=['MD5', 'SHA', 'MD5_DES', 'MD5_AES', 'SHA_DES', 'SHA_AES'], - default='MD5'), - user_name=dict(required=True, type='str', no_log=True, length_max=32), - password=dict(required=True, type='str', no_log=True, length_max=32), - max_hops=dict(type='int', default=0), - role=dict(type='str', - choices=['leaf', 'spine', 'border', 'border_spine', 'border_gateway', 'border_gateway_spine', - 'super_spine', 'border_super_spine', 'border_gateway_super_spine'], - default='leaf'), - preserve_config=dict(type='bool', default=False) + seed_ip=dict(required=True, type="str"), + auth_proto=dict( + type="str", + choices=["MD5", "SHA", "MD5_DES", "MD5_AES", "SHA_DES", "SHA_AES"], + default="MD5", + ), + user_name=dict(required=True, type="str", no_log=True, length_max=32), + password=dict(required=True, type="str", no_log=True, length_max=32), + max_hops=dict(type="int", default=0), + role=dict( + type="str", + choices=[ + "leaf", + "spine", + "border", + "border_spine", + "border_gateway", + "border_gateway_spine", + "super_spine", + "border_super_spine", + "border_gateway_super_spine", + ], + default="leaf", + ), + 
preserve_config=dict(type="bool", default=False), ) msg = None if self.config: for inv in self.config: - if 'seed_ip' not in inv or 'user_name' not in inv or 'password' not in inv: + if ( + "seed_ip" not in inv + or "user_name" not in inv + or "password" not in inv + ): msg = "seed ip/user name and password are mandatory under inventory parameters" else: - if state == 'merged': - msg = "config: element is mandatory for this state {}".format(state) + if state == "merged": + msg = "config: element is mandatory for this state {0}".format( + state + ) if msg: self.module.fail_json(msg=msg) if self.config: - valid_inv, invalid_params = validate_list_of_dicts(self.config, inv_spec, self.module) + valid_inv, invalid_params = validate_list_of_dicts( + self.config, inv_spec, self.module + ) for inv in valid_inv: self.validated.append(inv) if invalid_params: - msg = 'Invalid parameters in playbook: {}'.format('\n'.join(invalid_params)) + msg = "Invalid parameters in playbook: {0}".format( + "\n".join(invalid_params) + ) self.module.fail_json(msg=msg) - elif state == 'deleted': + elif state == "deleted": - inv_spec = dict( - seed_ip=dict(required=True, type='str') - ) + inv_spec = dict(seed_ip=dict(required=True, type="str")) msg = None if self.config: for inv in self.config: - if 'seed_ip' not in inv: + if "seed_ip" not in inv: msg = "seed ip is mandatory under inventory parameters for switch deletion" if msg: self.module.fail_json(msg=msg) if self.config: - valid_inv, invalid_params = validate_list_of_dicts(self.config, inv_spec) + valid_inv, invalid_params = validate_list_of_dicts( + self.config, inv_spec + ) for inv in valid_inv: self.validated.append(inv) if invalid_params: - msg = 'Invalid parameters in playbook: {}'.format('\n'.join(invalid_params)) + msg = "Invalid parameters in playbook: {0}".format( + "\n".join(invalid_params) + ) self.module.fail_json(msg=msg) else: inv_spec = dict( - seed_ip=dict(type='str'), - role=dict(type='str', - choices=['leaf', 'spine', 
'border', 'border_spine', 'border_gateway', 'border_gateway_spine', - 'super_spine', 'border_super_spine', 'border_gateway_super_spine', 'None'], - default='None') + seed_ip=dict(type="str"), + role=dict( + type="str", + choices=[ + "leaf", + "spine", + "border", + "border_spine", + "border_gateway", + "border_gateway_spine", + "super_spine", + "border_super_spine", + "border_gateway_super_spine", + "None", + ], + default="None", + ), ) if self.config: - valid_inv, invalid_params = validate_list_of_dicts(self.config, inv_spec) + valid_inv, invalid_params = validate_list_of_dicts( + self.config, inv_spec + ) for inv in valid_inv: self.validated.append(inv) if invalid_params: - msg = 'Invalid parameters in playbook: {}'.format('\n'.join(invalid_params)) + msg = "Invalid parameters in playbook: {0}".format( + "\n".join(invalid_params) + ) self.module.fail_json(msg=msg) def import_switches(self): - method = 'POST' - path = '/rest/control/fabrics/{}'.format(self.fabric) + method = "POST" + path = "/rest/control/fabrics/{0}".format(self.fabric) if self.nd: path = self.nd_prefix + path # create_path = path + '/inventory/discover?gfBlockingCall=true' - create_path = path + '/inventory/discover' + create_path = path + "/inventory/discover" if self.diff_create: for create in self.diff_create: - import_response = dcnm_send(self.module, method, create_path, json.dumps(create)) - self.result['response'].append(import_response) - fail, self.result['changed'] = self.handle_response(import_response, "create") + import_response = dcnm_send( + self.module, method, create_path, json.dumps(create) + ) + self.result["response"].append(import_response) + fail, self.result["changed"] = self.handle_response( + import_response, "create" + ) if fail: self.failure(import_response) def rediscover_switch(self, serial_num): - method = 'POST' - path = '/rest/control/fabrics/{}/inventory/rediscover/{}'.format(self.fabric, serial_num) + method = "POST" + path = 
"/rest/control/fabrics/{0}/inventory/rediscover/{1}".format( + self.fabric, serial_num + ) if self.nd: path = self.nd_prefix + path response = dcnm_send(self.module, method, path) - self.result['response'].append(response) - fail, self.result['changed'] = self.handle_response(response, "create") + self.result["response"].append(response) + fail, self.result["changed"] = self.handle_response(response, "create") if fail: self.failure(response) def rediscover_all_switches(self): # Get Fabric Inventory Details - method = 'GET' - path = '/rest/control/fabrics/{}/inventory'.format(self.fabric) + method = "GET" + path = "/rest/control/fabrics/{0}/inventory".format(self.fabric) if self.nd: path = self.nd_prefix + path get_inv = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(get_inv, 'query_dcnm') + missing_fabric, not_ok = self.handle_response(get_inv, "query_dcnm") if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Unable to find inventories under fabric: {}".format(self.fabric) + msg1 = "Fabric {0} not present on DCNM".format(self.fabric) + msg2 = "Unable to find inventories under fabric: {0}".format(self.fabric) self.module.fail_json(msg=msg1 if missing_fabric else msg2) - if not get_inv.get('DATA'): + if not get_inv.get("DATA"): return def ready_to_continue(inv_data): @@ -620,8 +685,8 @@ def ready_to_continue(inv_data): # First check migration mode. Switches will enter migration mode # even if the GRFIELD_DEBUG_FLAG is enabled so this needs to be # checked first. - for switch in inv_data.get('DATA'): - if switch['mode'].lower() == "migration": + for switch in inv_data.get("DATA"): + if switch["mode"].lower() == "migration": # At least one switch is still in migration mode # so not ready to continue return False @@ -629,7 +694,7 @@ def ready_to_continue(inv_data): # Check # 2 # The fabric has a setting to prevent reload for greenfield # deployments. 
If this is enabled we can skip check 3 and just return True - if self.fabric_details['nvPairs']['GRFIELD_DEBUG_FLAG'].lower() == "enable": + if self.fabric_details["nvPairs"]["GRFIELD_DEBUG_FLAG"].lower() == "enable": return True # Check # 3 @@ -638,8 +703,8 @@ def ready_to_continue(inv_data): # the switch will show up as managable for a period of time before it # moves to unmanagable but we need to wait for this to allow enough time # for the reload to completed. - for switch in inv_data.get('DATA'): - if not switch['managable']: + for switch in inv_data.get("DATA"): + if not switch["managable"]: # We found our first switch that changed state to # unmanageable because it's reloading. Now we can # continue @@ -650,8 +715,8 @@ def ready_to_continue(inv_data): def switches_managable(inv_data): managable = True - for switch in inv_data['DATA']: - if not switch['managable']: + for switch in inv_data["DATA"]: + if not switch["managable"]: managable = False break @@ -665,7 +730,7 @@ def switches_managable(inv_data): # we don't need to loop. all_brownfield_switches = True for switch in self.config: - if not switch['preserve_config']: + if not switch["preserve_config"]: all_brownfield_switches = False while attempt < total_attempts and not all_brownfield_switches: @@ -673,7 +738,10 @@ def switches_managable(inv_data): # Don't error out. We might miss the status change so worst case # scenario is that we loop 300 times and then bail out. if attempt == 1: - if self.fabric_details['nvPairs']['GRFIELD_DEBUG_FLAG'].lower() == "enable": + if ( + self.fabric_details["nvPairs"]["GRFIELD_DEBUG_FLAG"].lower() + == "enable" + ): # It may take a few seconds for switches to enter migration mode when # this flag is set. Give it a few seconds. 
time.sleep(20) @@ -682,93 +750,99 @@ def switches_managable(inv_data): time.sleep(5) attempt += 1 continue - else: - break + + break attempt = 1 total_attempts = 300 while attempt < total_attempts: if attempt == total_attempts: - msg = "Failed to rediscover switches after {} attempts".format(total_attempts) + msg = "Failed to rediscover switches after {0} attempts".format( + total_attempts + ) self.module.fail_json(msg=msg) get_inv = dcnm_send(self.module, method, path) if not switches_managable(get_inv): time.sleep(5) attempt += 1 continue - else: - break - for inv in get_inv['DATA']: - self.rediscover_switch(inv['serialNumber']) + break + + for inv in get_inv["DATA"]: + self.rediscover_switch(inv["serialNumber"]) def all_switches_ok(self): all_ok = True # Get Fabric Inventory Details - method = 'GET' - path = '/rest/control/fabrics/{}/inventory'.format(self.fabric) + method = "GET" + path = "/rest/control/fabrics/{0}/inventory".format(self.fabric) if self.nd: path = self.nd_prefix + path get_inv = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(get_inv, 'query_dcnm') + missing_fabric, not_ok = self.handle_response(get_inv, "query_dcnm") if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Unable to find inventories under fabric: {}".format(self.fabric) + msg1 = "Fabric {0} not present on DCNM".format(self.fabric) + msg2 = "Unable to find inventories under fabric: {0}".format(self.fabric) self.module.fail_json(msg=msg1 if missing_fabric else msg2) - for inv in get_inv['DATA']: - if inv['status'] != "ok": + for inv in get_inv["DATA"]: + if inv["status"] != "ok": all_ok = False - self.rediscover_switch(inv['serialNumber']) + self.rediscover_switch(inv["serialNumber"]) return all_ok def set_lancred_switch(self, set_lan): - method = 'POST' - path = '/fm/fmrest/lanConfig/saveSwitchCredentials' + method = "POST" + path = "/fm/fmrest/lanConfig/saveSwitchCredentials" if self.nd: - path = 
self.nd_prefix + '/' + path[6:] + path = self.nd_prefix + "/" + path[6:] response = dcnm_send(self.module, method, path, urlencode(set_lan)) - self.result['response'].append(response) - fail, self.result['changed'] = self.handle_response(response, "create") + self.result["response"].append(response) + fail, self.result["changed"] = self.handle_response(response, "create") if fail: self.failure(response) def lancred_all_switches(self): # Get Fabric Inventory Details - method = 'GET' - path = '/fm/fmrest/lanConfig/getLanSwitchCredentials' + method = "GET" + path = "/fm/fmrest/lanConfig/getLanSwitchCredentials" if self.nd: - path = self.nd_prefix + '/' + path[6:] + path = self.nd_prefix + "/" + path[6:] # lan_path = '/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/lanConfig/getLanSwitchCredentials' get_lan = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(get_lan, 'query_dcnm') + missing_fabric, not_ok = self.handle_response(get_lan, "query_dcnm") if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Unable to getLanSwitchCredentials under fabric: {}".format(self.fabric) + msg1 = "Fabric {0} not present on DCNM".format(self.fabric) + msg2 = "Unable to getLanSwitchCredentials under fabric: {0}".format( + self.fabric + ) self.module.fail_json(msg=msg1 if missing_fabric else msg2) - if not get_lan.get('DATA'): + if not get_lan.get("DATA"): return for create in self.want_create: - for lan in get_lan['DATA']: - if not lan['switchDbID']: - msg = "Unable to SWITCHDBID using getLanSwitchCredentials under fabric: {}".format(self.fabric) + for lan in get_lan["DATA"]: + if not lan["switchDbID"]: + msg = "Unable to SWITCHDBID using getLanSwitchCredentials under fabric: {0}".format( + self.fabric + ) self.module.fail_json(msg=msg) - if lan['ipAddress'] == create["switches"][0]['ipaddr']: + if lan["ipAddress"] == create["switches"][0]["ipaddr"]: set_lan = { - "switchIds": lan['switchDbID'], - 
"userName": create['username'], - "password": create['password'], - "v3Protocol": "0" + "switchIds": lan["switchDbID"], + "userName": create["username"], + "password": create["password"], + "v3Protocol": "0", } # TODO: Remove this check later.. should work on ND but does not for some reason if not self.nd: @@ -776,34 +850,40 @@ def lancred_all_switches(self): def assign_role(self): - method = 'GET' - path = '/rest/control/fabrics/{}/inventory'.format(self.fabric) + method = "GET" + path = "/rest/control/fabrics/{0}/inventory".format(self.fabric) if self.nd: path = self.nd_prefix + path get_role = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(get_role, 'query_dcnm') + missing_fabric, not_ok = self.handle_response(get_role, "query_dcnm") if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Unable to find inventories under fabric: {}".format(self.fabric) + msg1 = "Fabric {0} not present on DCNM".format(self.fabric) + msg2 = "Unable to find inventories under fabric: {0}".format(self.fabric) self.module.fail_json(msg=msg1 if missing_fabric else msg2) - if not get_role.get('DATA'): + if not get_role.get("DATA"): return for create in self.want_create: - for role in get_role['DATA']: - if not role['switchDbID']: - msg = "Unable to get SWITCHDBID using getLanSwitchCredentials under fabric: {}".format(self.fabric) + for role in get_role["DATA"]: + if not role["switchDbID"]: + msg = "Unable to get SWITCHDBID using getLanSwitchCredentials under fabric: {0}".format( + self.fabric + ) self.module.fail_json(msg=msg) - if role['ipAddress'] == create["switches"][0]['ipaddr']: - method = 'PUT' - path = '/fm/fmrest/topology/role/{}?newRole={}'.format(role['switchDbID'], create['role'].replace("_", "%20")) + if role["ipAddress"] == create["switches"][0]["ipaddr"]: + method = "PUT" + path = "/fm/fmrest/topology/role/{0}?newRole={1}".format( + role["switchDbID"], create["role"].replace("_", "%20") + ) 
if self.nd: - path = self.nd_prefix + '/' + path[6:] + path = self.nd_prefix + "/" + path[6:] response = dcnm_send(self.module, method, path) - self.result['response'].append(response) - fail, self.result['changed'] = self.handle_response(response, "create") + self.result["response"].append(response) + fail, self.result["changed"] = self.handle_response( + response, "create" + ) if fail: self.failure(response) @@ -814,47 +894,49 @@ def config_save(self): for x in range(0, no_of_tries): # Get Fabric ID - method = 'GET' - path = '/rest/control/fabrics/{}'.format(self.fabric) + method = "GET" + path = "/rest/control/fabrics/{0}".format(self.fabric) if self.nd: path = self.nd_prefix + path get_fid = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(get_fid, 'create_dcnm') + missing_fabric, not_ok = self.handle_response(get_fid, "create_dcnm") - if not get_fid.get('DATA'): + if not get_fid.get("DATA"): return - if not get_fid['DATA']['id']: - msg = "Unable to find id for fabric: {}".format(self.fabric) + if not get_fid["DATA"]["id"]: + msg = "Unable to find id for fabric: {0}".format(self.fabric) self.module.fail_json(msg=msg) - fabric_id = get_fid['DATA']['id'] + fabric_id = get_fid["DATA"]["id"] # config-save - method = 'POST' - path = '/rest/control/fabrics/{}'.format(self.fabric) + method = "POST" + path = "/rest/control/fabrics/{0}".format(self.fabric) if self.nd: path = self.nd_prefix + path - save_path = path + '/config-save' + save_path = path + "/config-save" response = dcnm_send(self.module, method, save_path) - self.result['response'].append(response) - fail, self.result['changed'] = self.handle_response(response, "create") + self.result["response"].append(response) + fail, self.result["changed"] = self.handle_response(response, "create") if fail: self.failure(response) if response["RETURN_CODE"] != 200: # Get Fabric Errors - method = 'GET' - path = '/rest/control/fabrics/{}/errors'.format(fabric_id) + method = "GET" + path 
= "/rest/control/fabrics/{0}/errors".format(fabric_id) if self.nd: path = self.nd_prefix + path get_fiderr = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(get_fiderr, 'query_dcnm') + missing_fabric, not_ok = self.handle_response(get_fiderr, "query_dcnm") if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Could not get any fabric errors for fabric: {}".format(self.fabric) + msg1 = "Fabric {0} not present on DCNM".format(self.fabric) + msg2 = "Could not get any fabric errors for fabric: {0}".format( + self.fabric + ) self.module.fail_json(msg=msg1 if missing_fabric else msg2) else: @@ -868,14 +950,14 @@ def config_save(self): def config_deploy(self): # config-deploy - method = 'POST' - path = '/rest/control/fabrics/{}'.format(self.fabric) + method = "POST" + path = "/rest/control/fabrics/{0}".format(self.fabric) if self.nd: path = self.nd_prefix + path - path = path + '/config-deploy' + path = path + "/config-deploy" response = dcnm_send(self.module, method, path) - self.result['response'].append(response) - fail, self.result['changed'] = self.handle_response(response, "create") + self.result["response"].append(response) + fail, self.result["changed"] = self.handle_response(response, "create") if fail: self.failure(response) @@ -883,14 +965,14 @@ def config_deploy(self): def delete_switch(self): if self.diff_delete: - method = 'DELETE' + method = "DELETE" for sn in self.diff_delete: - path = '/rest/control/fabrics/{}/switches/{}'.format(self.fabric, sn) + path = "/rest/control/fabrics/{0}/switches/{1}".format(self.fabric, sn) if self.nd: path = self.nd_prefix + path response = dcnm_send(self.module, method, path) - self.result['response'].append(response) - fail, self.result['changed'] = self.handle_response(response, "delete") + self.result["response"].append(response) + fail, self.result["changed"] = self.handle_response(response, "delete") if fail: self.failure(response) @@ 
-899,43 +981,49 @@ def get_diff_query(self): query = [] - method = 'GET' - path = '/rest/control/fabrics/{}/inventory'.format(self.fabric) + method = "GET" + path = "/rest/control/fabrics/{0}/inventory".format(self.fabric) if self.nd: path = self.nd_prefix + path inv_objects = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(inv_objects, 'query_dcnm') - - if inv_objects.get('ERROR') == 'Not Found' and inv_objects.get('RETURN_CODE') == 404: - self.module.fail_json(msg="Fabric {} not present on DCNM".format(self.fabric)) + missing_fabric, not_ok = self.handle_response(inv_objects, "query_dcnm") + + if ( + inv_objects.get("ERROR") == "Not Found" + and inv_objects.get("RETURN_CODE") == 404 + ): + self.module.fail_json( + msg="Fabric {0} not present on DCNM".format(self.fabric) + ) return if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Unable to find inventories under fabric: {}".format(self.fabric) + msg1 = "Fabric {0} not present on DCNM".format(self.fabric) + msg2 = "Unable to find inventories under fabric: {0}".format(self.fabric) self.module.fail_json(msg=msg1 if missing_fabric else msg2) - if not inv_objects['DATA']: + if not inv_objects["DATA"]: return if self.config: for want_c in self.want_create: - for inv in inv_objects['DATA']: - if want_c['role'] == 'None' and want_c["seedIP"] != 'None': - if want_c["seedIP"] == inv['ipAddress']: + for inv in inv_objects["DATA"]: + if want_c["role"] == "None" and want_c["seedIP"] != "None": + if want_c["seedIP"] == inv["ipAddress"]: query.append(inv) continue - elif want_c['role'] != 'None' and want_c["seedIP"] == 'None': - if want_c['role'] == inv['switchRole'].replace(" ", "_"): + elif want_c["role"] != "None" and want_c["seedIP"] == "None": + if want_c["role"] == inv["switchRole"].replace(" ", "_"): query.append(inv) continue else: - if want_c["seedIP"] == inv['ipAddress'] and \ - want_c['role'] == inv['switchRole'].replace(" ", "_"): 
+ if want_c["seedIP"] == inv["ipAddress"] and want_c[ + "role" + ] == inv["switchRole"].replace(" ", "_"): query.append(inv) continue else: - for inv in inv_objects['DATA']: + for inv in inv_objects["DATA"]: query.append(inv) self.query = query @@ -945,21 +1033,21 @@ def handle_response(self, res, op): fail = False changed = True - if op == 'query_dcnm': + if op == "query_dcnm": # This if blocks handles responses to the query APIs against DCNM. # Basically all GET operations. - if res.get('ERROR') == 'Not Found' and res['RETURN_CODE'] == 404: + if res.get("ERROR") == "Not Found" and res["RETURN_CODE"] == 404: return True, False - if res['RETURN_CODE'] != 200 or res['MESSAGE'] != 'OK': + if res["RETURN_CODE"] != 200 or res["MESSAGE"] != "OK": return False, True return False, False # Responses to all other operations POST and PUT are handled here. - if res.get('MESSAGE') != 'OK': + if res.get("MESSAGE") != "OK": fail = True changed = False return fail, changed - if res.get('ERROR'): + if res.get("ERROR"): fail = True changed = False @@ -969,72 +1057,84 @@ def failure(self, resp): res = copy.deepcopy(resp) - if not resp.get('DATA'): - data = copy.deepcopy(resp.get('DATA')) - if data.get('stackTrace'): - data.update({'stackTrace': 'Stack trace is hidden, use \'-vvvvv\' to print it'}) - res.update({'DATA': data}) + if not resp.get("DATA"): + data = copy.deepcopy(resp.get("DATA")) + if data.get("stackTrace"): + data.update( + {"stackTrace": "Stack trace is hidden, use '-vvvvv' to print it"} + ) + res.update({"DATA": data}) self.module.fail_json(msg=res) def main(): - """ main entry point for module execution - """ + """main entry point for module execution""" element_spec = dict( - fabric=dict(required=True, type='str'), - config=dict(required=False, type='list', elements='dict'), - state=dict(default='merged', - choices=['merged', 'overridden', 'deleted', 'query']) + fabric=dict(required=True, type="str"), + config=dict(required=False, type="list", elements="dict"), + 
state=dict( + default="merged", choices=["merged", "overridden", "deleted", "query"] + ), ) - module = AnsibleModule(argument_spec=element_spec, - supports_check_mode=True) + module = AnsibleModule(argument_spec=element_spec, supports_check_mode=True) dcnm_inv = DcnmInventory(module) dcnm_inv.validate_input() dcnm_inv.get_want() dcnm_inv.get_have() - if module.params['state'] == 'merged': + if module.params["state"] == "merged": dcnm_inv.get_diff_merge() - if module.params['state'] == 'overridden': + if module.params["state"] == "overridden": dcnm_inv.get_diff_override() - if module.params['state'] == 'deleted': + if module.params["state"] == "deleted": dcnm_inv.get_diff_delete() - if module.params['state'] == 'query': + if module.params["state"] == "query": dcnm_inv.get_diff_query() - dcnm_inv.result['changed'] = False - dcnm_inv.result['response'] = dcnm_inv.query + dcnm_inv.result["changed"] = False + dcnm_inv.result["response"] = dcnm_inv.query - if not dcnm_inv.diff_delete and module.params['state'] == 'deleted': - dcnm_inv.result['changed'] = False - dcnm_inv.result['response'] = "The switch provided is not part of the fabric and cannot be deleted" - - if not dcnm_inv.diff_create and module.params['state'] == 'merged': - dcnm_inv.result['changed'] = False - dcnm_inv.result['response'] = "The switch provided is already part of the fabric and cannot be created again" + if not dcnm_inv.diff_delete and module.params["state"] == "deleted": + dcnm_inv.result["changed"] = False + dcnm_inv.result[ + "response" + ] = "The switch provided is not part of the fabric and cannot be deleted" - if not dcnm_inv.diff_create and not dcnm_inv.diff_delete and module.params['state'] == 'overridden': - dcnm_inv.result['changed'] = False + if not dcnm_inv.diff_create and module.params["state"] == "merged": + dcnm_inv.result["changed"] = False dcnm_inv.result[ - 'response'] = "The switch provided is already part of the fabric and there is no more device to delete in the fabric" + 
"response" + ] = "The switch provided is already part of the fabric and cannot be created again" + + if ( + not dcnm_inv.diff_create + and not dcnm_inv.diff_delete + and module.params["state"] == "overridden" + ): + dcnm_inv.result["changed"] = False + dcnm_inv.result[ + "response" + ] = "The switch provided is already part of the fabric and there is no more device to delete in the fabric" - if not dcnm_inv.query and module.params['state'] == 'query': - dcnm_inv.result['changed'] = False - dcnm_inv.result['response'] = "The queried switch is not part of the fabric configured" + if not dcnm_inv.query and module.params["state"] == "query": + dcnm_inv.result["changed"] = False + dcnm_inv.result[ + "response" + ] = "The queried switch is not part of the fabric configured" if dcnm_inv.diff_create or dcnm_inv.diff_delete: - dcnm_inv.result['changed'] = True + dcnm_inv.result["changed"] = True else: module.exit_json(**dcnm_inv.result) if module.check_mode: - dcnm_inv.result['changed'] = False + dcnm_inv.result["changed"] = False module.exit_json(**dcnm_inv.result) # Delete Switch @@ -1062,13 +1162,15 @@ def main(): if not dcnm_inv.all_switches_ok(): time.sleep(5) continue - else: - break + + break # Step 4 # Verify all devices came up finally if not dcnm_inv.all_switches_ok(): - msg = "Failed to import all switches into fabric: {}".format(dcnm_inv.fabric) + msg = "Failed to import all switches into fabric: {0}".format( + dcnm_inv.fabric + ) module.fail_json(msg=msg) # Step 5 @@ -1090,5 +1192,5 @@ def main(): module.exit_json(**dcnm_inv.result) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/plugins/modules/dcnm_network.py b/plugins/modules/dcnm_network.py index fa0ba6aeb..d747bff6a 100644 --- a/plugins/modules/dcnm_network.py +++ b/plugins/modules/dcnm_network.py @@ -1,6 +1,6 @@ #!/usr/bin/python # -# Copyright (c) 2020-2021 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import absolute_import, division, print_function +__metaclass__ = type __author__ = "Chris Van Heuveln, Shrishail Kariyappanavar, Karthik Babu Harichandra Babu, Praveen Ramoorthy" -DOCUMENTATION = ''' +DOCUMENTATION = """ --- module: dcnm_network short_description: Add and remove Networks from a DCNM managed VXLAN fabric. @@ -155,9 +157,10 @@ type: str required: false dhcp_loopback_id: - description: 'Loopback ID for DHCP Relay interface' + description: + - Loopback ID for DHCP Relay interface + - Configured ID value should be in range 0-1023 type: int - note: Configured ID value should be in range 0-1023 required: false attach: description: @@ -185,9 +188,9 @@ - Global knob to control whether to deploy the attachment type: bool default: true -''' +""" -EXAMPLES = ''' +EXAMPLES = """ # This module supports the following states: # # Merged: @@ -371,15 +374,23 @@ config: - net_name: ansible-net13 - net_name: ansible-net12 -''' +""" import json import time import copy import re -from ansible_collections.cisco.dcnm.plugins.module_utils.network.dcnm.dcnm import get_fabric_inventory_details, \ - dcnm_send, validate_list_of_dicts, dcnm_get_ip_addr_info, get_ip_sn_dict, get_fabric_details, \ - get_ip_sn_fabric_dict, dcnm_version_supported, dcnm_get_url +from ansible_collections.cisco.dcnm.plugins.module_utils.network.dcnm.dcnm import ( + get_fabric_inventory_details, + dcnm_send, + validate_list_of_dicts, + dcnm_get_ip_addr_info, + get_ip_sn_dict, + get_fabric_details, + get_ip_sn_fabric_dict, + dcnm_version_supported, + dcnm_get_url, +) from ansible.module_utils.connection import Connection from ansible.module_utils.basic import 
AnsibleModule @@ -394,8 +405,8 @@ class DcnmNetwork: "GET_NET_ID": "/rest/managed-pool/fabrics/{}/segments/ids", "GET_NET": "/rest/top-down/fabrics/{}/networks", "GET_NET_NAME": "/rest/top-down/fabrics/{}/networks/{}", - "GET_VLAN": "/rest/resource-manager/vlan/{}?vlanUsageType=TOP_DOWN_VRF_VLAN" - }, + "GET_VLAN": "/rest/resource-manager/vlan/{}?vlanUsageType=TOP_DOWN_VRF_VLAN", + }, 12: { "GET_VRF": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/vrfs", "GET_VRF_NET": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/networks?vrf-name={}", @@ -403,15 +414,15 @@ class DcnmNetwork: "GET_NET_ID": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/top-down/fabrics/{}/netinfo", "GET_NET": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/networks", "GET_NET_NAME": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/networks/{}", - "GET_VLAN": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/vlan/{}?vlanUsageType=TOP_DOWN_VRF_VLAN" - } + "GET_VLAN": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/vlan/{}?vlanUsageType=TOP_DOWN_VRF_VLAN", + }, } def __init__(self, module): self.module = module self.params = module.params - self.fabric = module.params['fabric'] - self.config = copy.deepcopy(module.params.get('config')) + self.fabric = module.params["fabric"] + self.config = copy.deepcopy(module.params.get("config")) self.check_mode = False self.conn = Connection(module._socket_path) self.have_create = [] @@ -445,18 +456,15 @@ def __init__(self, module): self.ip_sn, self.hn_sn = get_ip_sn_dict(self.inventory_data) self.ip_fab, self.sn_fab = get_ip_sn_fabric_dict(self.inventory_data) self.fabric_det = get_fabric_details(module, self.fabric) - self.is_ms_fabric = True if self.fabric_det.get('fabricType') == 'MFD' else False + self.is_ms_fabric = ( + True if self.fabric_det.get("fabricType") == "MFD" else False + ) if self.dcnm_version > 12: self.paths = self.dcnm_network_paths[12] else: self.paths = 
self.dcnm_network_paths[self.dcnm_version] - self.result = dict( - changed=False, - diff=[], - response=[], - warnings=[] - ) + self.result = dict(changed=False, diff=[], response=[], warnings=[]) self.failed_to_rollback = False self.WAIT_TIME_FOR_DELETE_LOOP = 5 # in seconds @@ -473,35 +481,60 @@ def diff_for_attach_deploy(self, want_a, have_a, replace=False): found = False if have_a: for have in have_a: - if want['serialNumber'] == have['serialNumber']: + if want["serialNumber"] == have["serialNumber"]: found = True - if want.get('isAttached') is not None: - if bool(have['isAttached']) and bool(want['isAttached']): - h_sw_ports = have['switchPorts'].split(",") if have['switchPorts'] else [] - w_sw_ports = want['switchPorts'].split(",") if want['switchPorts'] else [] + if want.get("isAttached") is not None: + if bool(have["isAttached"]) and bool(want["isAttached"]): + h_sw_ports = ( + have["switchPorts"].split(",") + if have["switchPorts"] + else [] + ) + w_sw_ports = ( + want["switchPorts"].split(",") + if want["switchPorts"] + else [] + ) # This is needed to handle cases where vlan is updated after deploying the network # and attachments. This ensures that the attachments before vlan update will use previous # vlan id. All the active attachments on DCNM will have a vlan-id. - if have.get('vlan'): - want['vlan'] = have.get('vlan') + if have.get("vlan"): + want["vlan"] = have.get("vlan") if sorted(h_sw_ports) != sorted(w_sw_ports): - atch_sw_ports = list(set(w_sw_ports) - set(h_sw_ports)) + atch_sw_ports = list( + set(w_sw_ports) - set(h_sw_ports) + ) # Adding some logic which is needed for replace and override. 
if replace: - dtach_sw_ports = list(set(h_sw_ports) - set(w_sw_ports)) + dtach_sw_ports = list( + set(h_sw_ports) - set(w_sw_ports) + ) if not atch_sw_ports and not dtach_sw_ports: continue - want.update({'switchPorts': ','.join(atch_sw_ports) if atch_sw_ports else ""}) want.update( - {'detachSwitchPorts': ','.join(dtach_sw_ports) if dtach_sw_ports else ""}) - - del want['isAttached'] + { + "switchPorts": ",".join(atch_sw_ports) + if atch_sw_ports + else "" + } + ) + want.update( + { + "detachSwitchPorts": ",".join( + dtach_sw_ports + ) + if dtach_sw_ports + else "" + } + ) + + del want["isAttached"] attach_list.append(want) continue @@ -510,33 +543,35 @@ def diff_for_attach_deploy(self, want_a, have_a, replace=False): # The attachments in the have consist of attachments in want and more. continue - want.update({'switchPorts': ','.join(atch_sw_ports)}) - del want['isAttached'] + want.update( + {"switchPorts": ",".join(atch_sw_ports)} + ) + del want["isAttached"] attach_list.append(want) continue - if bool(have['isAttached']) is not bool(want['isAttached']): + if bool(have["isAttached"]) is not bool(want["isAttached"]): # When the attachment is to be detached and undeployed, ignore any changes # to the attach section in the want(i.e in the playbook). - if not bool(want['isAttached']): - del have['isAttached'] - have.update({'deployment': False}) + if not bool(want["isAttached"]): + del have["isAttached"] + have.update({"deployment": False}) attach_list.append(have) continue - del want['isAttached'] + del want["isAttached"] attach_list.append(want) continue - if bool(have['deployment']) is not bool(want['deployment']): + if bool(have["deployment"]) is not bool(want["deployment"]): # We hit this section when attachment is successful, but, deployment is stuck in PENDING or # OUT-OF-SYNC. In such cases, we just add the object to deploy list only. 
have['deployment'] # is set to False when deployment is PENDING or OUT-OF-SYNC - ref - get_have() dep_net = True if not found: - if bool(want['deployment']): - del want['isAttached'] + if bool(want["deployment"]): + del want["isAttached"] attach_list.append(want) return attach_list, dep_net @@ -547,37 +582,46 @@ def update_attach_params(self, attach, net_name, deploy): return {} serial = "" - attach['ip_address'] = dcnm_get_ip_addr_info(self.module, attach['ip_address'], None, None) + attach["ip_address"] = dcnm_get_ip_addr_info( + self.module, attach["ip_address"], None, None + ) for ip, ser in self.ip_sn.items(): - if ip == attach['ip_address']: + if ip == attach["ip_address"]: serial = ser if not serial: - self.module.fail_json(msg='Fabric: {} does not have the switch: {}' - .format(self.fabric, attach['ip_address'])) + self.module.fail_json( + msg="Fabric: {0} does not have the switch: {1}".format( + self.fabric, attach["ip_address"] + ) + ) - role = self.inventory_data[attach['ip_address']].get('switchRole') - if role.lower() == 'spine' or role.lower() == 'super spine': - msg = 'Networks cannot be attached to switch {} with role {}'.format(attach['ip_address'], role) + role = self.inventory_data[attach["ip_address"]].get("switchRole") + if role.lower() == "spine" or role.lower() == "super spine": + msg = "Networks cannot be attached to switch {0} with role {1}".format( + attach["ip_address"], role + ) self.module.fail_json(msg=msg) - attach.update({'fabric': self.fabric}) - attach.update({'networkName': net_name}) - attach.update({'serialNumber': serial}) - attach.update({'switchPorts': ','.join(attach['ports'])}) - attach.update({'detachSwitchPorts': ""}) # Is this supported??Need to handle correct - attach.update({'vlan': 0}) - attach.update({'dot1QVlan': 0}) - attach.update({'untagged': False}) - attach.update({'deployment': deploy}) - attach.update({'isAttached': deploy}) - attach.update({'extensionValues': ""}) - attach.update({'instanceValues': ""}) - 
attach.update({'freeformConfig': ""}) - if 'deploy' in attach: - del attach['deploy'] - del attach['ports'] - del attach['ip_address'] + attach.update({"fabric": self.fabric}) + attach.update({"networkName": net_name}) + attach.update({"serialNumber": serial}) + attach.update({"switchPorts": ",".join(attach["ports"])}) + attach.update( + {"detachSwitchPorts": ""} + ) # Is this supported??Need to handle correct + attach.update({"vlan": 0}) + attach.update({"dot1QVlan": 0}) + attach.update({"untagged": False}) + attach.update({"deployment": deploy}) + attach.update({"isAttached": deploy}) + attach.update({"extensionValues": ""}) + attach.update({"instanceValues": ""}) + attach.update({"freeformConfig": ""}) + if "deploy" in attach: + del attach["deploy"] + del attach["ports"] + del attach["ip_address"] return attach @@ -607,51 +651,57 @@ def diff_for_create(self, want, have): dhcp3_vrf_changed = False dhcp_loopback_changed = False - if want.get('networkId') and want['networkId'] != have['networkId']: - self.module.fail_json(msg="networkId can not be updated on existing network: {}". - format(want['networkName'])) - - if have['vrf'] != want['vrf']: - self.module.fail_json(msg="The network {} existing already can not change" - " the VRF association from vrf:{} to vrf:{}". 
- format(want['networkName'], have['vrf'], want['vrf'])) - - json_to_dict_want = json.loads(want['networkTemplateConfig']) - json_to_dict_have = json.loads(have['networkTemplateConfig']) - - gw_ip_want = json_to_dict_want.get('gatewayIpAddress', "") - gw_ip_have = json_to_dict_have.get('gatewayIpAddress', "") - vlanId_want = json_to_dict_want.get('vlanId', "") - vlanId_have = json_to_dict_have.get('vlanId') - l2only_want = json_to_dict_want.get('isLayer2Only', "") - l2only_have = json_to_dict_have.get('isLayer2Only', "") - vlanName_want = json_to_dict_want.get('vlanName', "") - vlanName_have = json_to_dict_have.get('vlanName', "") - intDesc_want = json_to_dict_want.get('intfDescription', "") - intDesc_have = json_to_dict_have.get('intfDescription', "") - mtu_want = json_to_dict_want.get('mtu', "") - mtu_have = json_to_dict_have.get('mtu', "") - arpsup_want = json_to_dict_want.get('suppressArp', "") - arpsup_have = json_to_dict_have.get('suppressArp', "") - dhcp1_ip_want = json_to_dict_want.get('dhcpServerAddr1', "") - dhcp1_ip_want = json_to_dict_want.get('dhcpServerAddr1', "") - dhcp1_ip_have = json_to_dict_have.get('dhcpServerAddr1', "") - dhcp2_ip_want = json_to_dict_want.get('dhcpServerAddr2', "") - dhcp2_ip_have = json_to_dict_have.get('dhcpServerAddr2', "") - dhcp3_ip_want = json_to_dict_want.get('dhcpServerAddr3', "") - dhcp3_ip_have = json_to_dict_have.get('dhcpServerAddr3', "") - dhcp1_vrf_want = json_to_dict_want.get('vrfDhcp', "") - dhcp1_vrf_have = json_to_dict_have.get('vrfDhcp', "") - dhcp2_vrf_want = json_to_dict_want.get('vrfDhcp2', "") - dhcp2_vrf_have = json_to_dict_have.get('vrfDhcp2', "") - dhcp3_vrf_want = json_to_dict_want.get('vrfDhcp3', "") - dhcp3_vrf_have = json_to_dict_have.get('vrfDhcp3', "") - dhcp_loopback_want = json_to_dict_want.get('loopbackId', "") - dhcp_loopback_have = json_to_dict_have.get('loopbackId', "") + if want.get("networkId") and want["networkId"] != have["networkId"]: + self.module.fail_json( + msg="networkId can not be 
updated on existing network: {0}".format( + want["networkName"] + ) + ) + + if have["vrf"] != want["vrf"]: + self.module.fail_json( + msg="The network {0} existing already can not change" + " the VRF association from vrf:{1} to vrf:{2}".format( + want["networkName"], have["vrf"], want["vrf"] + ) + ) + + json_to_dict_want = json.loads(want["networkTemplateConfig"]) + json_to_dict_have = json.loads(have["networkTemplateConfig"]) + + gw_ip_want = json_to_dict_want.get("gatewayIpAddress", "") + gw_ip_have = json_to_dict_have.get("gatewayIpAddress", "") + vlanId_want = json_to_dict_want.get("vlanId", "") + vlanId_have = json_to_dict_have.get("vlanId") + l2only_want = json_to_dict_want.get("isLayer2Only", "") + l2only_have = json_to_dict_have.get("isLayer2Only", "") + vlanName_want = json_to_dict_want.get("vlanName", "") + vlanName_have = json_to_dict_have.get("vlanName", "") + intDesc_want = json_to_dict_want.get("intfDescription", "") + intDesc_have = json_to_dict_have.get("intfDescription", "") + mtu_want = json_to_dict_want.get("mtu", "") + mtu_have = json_to_dict_have.get("mtu", "") + arpsup_want = json_to_dict_want.get("suppressArp", "") + arpsup_have = json_to_dict_have.get("suppressArp", "") + dhcp1_ip_want = json_to_dict_want.get("dhcpServerAddr1", "") + dhcp1_ip_want = json_to_dict_want.get("dhcpServerAddr1", "") + dhcp1_ip_have = json_to_dict_have.get("dhcpServerAddr1", "") + dhcp2_ip_want = json_to_dict_want.get("dhcpServerAddr2", "") + dhcp2_ip_have = json_to_dict_have.get("dhcpServerAddr2", "") + dhcp3_ip_want = json_to_dict_want.get("dhcpServerAddr3", "") + dhcp3_ip_have = json_to_dict_have.get("dhcpServerAddr3", "") + dhcp1_vrf_want = json_to_dict_want.get("vrfDhcp", "") + dhcp1_vrf_have = json_to_dict_have.get("vrfDhcp", "") + dhcp2_vrf_want = json_to_dict_want.get("vrfDhcp2", "") + dhcp2_vrf_have = json_to_dict_have.get("vrfDhcp2", "") + dhcp3_vrf_want = json_to_dict_want.get("vrfDhcp3", "") + dhcp3_vrf_have = json_to_dict_have.get("vrfDhcp3", "") + 
dhcp_loopback_want = json_to_dict_want.get("loopbackId", "") + dhcp_loopback_have = json_to_dict_have.get("loopbackId", "") if vlanId_have != "": vlanId_have = int(vlanId_have) - tag_want = json_to_dict_want.get('tag', "") - tag_have = json_to_dict_have.get('tag') + tag_want = json_to_dict_want.get("tag", "") + tag_have = json_to_dict_have.get("tag") if tag_have != "": tag_have = int(tag_have) if mtu_have != "": @@ -667,21 +717,30 @@ def diff_for_create(self, want, have): if vlanId_want: - if have['networkTemplate'] != want['networkTemplate'] or \ - have['networkExtensionTemplate'] != want['networkExtensionTemplate'] or \ - gw_ip_have != gw_ip_want or vlanId_have != vlanId_want or \ - tag_have != tag_want or l2only_have != l2only_want or \ - vlanName_have != vlanName_want or intDesc_have != intDesc_want or \ - mtu_have != mtu_want or arpsup_have != arpsup_want or \ - dhcp1_ip_have != dhcp1_ip_want or dhcp2_ip_have != dhcp2_ip_want or \ - dhcp3_ip_have != dhcp3_ip_want or dhcp1_vrf_have != dhcp1_vrf_want or \ - dhcp2_vrf_have != dhcp2_vrf_want or dhcp3_vrf_have != dhcp3_vrf_want or \ - dhcp_loopback_have != dhcp_loopback_want: + if ( + have["networkTemplate"] != want["networkTemplate"] + or have["networkExtensionTemplate"] != want["networkExtensionTemplate"] + or gw_ip_have != gw_ip_want + or vlanId_have != vlanId_want + or tag_have != tag_want + or l2only_have != l2only_want + or vlanName_have != vlanName_want + or intDesc_have != intDesc_want + or mtu_have != mtu_want + or arpsup_have != arpsup_want + or dhcp1_ip_have != dhcp1_ip_want + or dhcp2_ip_have != dhcp2_ip_want + or dhcp3_ip_have != dhcp3_ip_want + or dhcp1_vrf_have != dhcp1_vrf_want + or dhcp2_vrf_have != dhcp2_vrf_want + or dhcp3_vrf_have != dhcp3_vrf_want + or dhcp_loopback_have != dhcp_loopback_want + ): # The network updates with missing networkId will have to use existing # networkId from the instance of the same network on DCNM. 
if vlanId_have != vlanId_want: - warn_msg = 'The VLAN change will effect only new attachments.' + warn_msg = "The VLAN change will effect only new attachments." if gw_ip_have != gw_ip_want: gw_changed = True @@ -712,20 +771,28 @@ def diff_for_create(self, want, have): if dhcp_loopback_have != dhcp_loopback_want: dhcp_loopback_changed = True - want.update({'networkId': have['networkId']}) + want.update({"networkId": have["networkId"]}) create = want else: - if have['networkTemplate'] != want['networkTemplate'] or \ - have['networkExtensionTemplate'] != want['networkExtensionTemplate'] or \ - gw_ip_have != gw_ip_want or tag_have != tag_want or \ - l2only_have != l2only_want or vlanName_have != vlanName_want or \ - intDesc_have != intDesc_want or mtu_have != mtu_want or \ - arpsup_have != arpsup_want or dhcp1_ip_have != dhcp1_ip_want or \ - dhcp2_ip_have != dhcp2_ip_want or dhcp3_ip_have != dhcp3_ip_want or \ - dhcp1_vrf_have != dhcp1_vrf_want or dhcp2_vrf_have != dhcp2_vrf_want or \ - dhcp3_vrf_have != dhcp3_vrf_want: + if ( + have["networkTemplate"] != want["networkTemplate"] + or have["networkExtensionTemplate"] != want["networkExtensionTemplate"] + or gw_ip_have != gw_ip_want + or tag_have != tag_want + or l2only_have != l2only_want + or vlanName_have != vlanName_want + or intDesc_have != intDesc_want + or mtu_have != mtu_want + or arpsup_have != arpsup_want + or dhcp1_ip_have != dhcp1_ip_want + or dhcp2_ip_have != dhcp2_ip_want + or dhcp3_ip_have != dhcp3_ip_want + or dhcp1_vrf_have != dhcp1_vrf_want + or dhcp2_vrf_have != dhcp2_vrf_want + or dhcp3_vrf_have != dhcp3_vrf_want + ): # The network updates with missing networkId will have to use existing # networkId from the instance of the same network on DCNM. 
@@ -758,79 +825,100 @@ def diff_for_create(self, want, have): if dhcp_loopback_have != dhcp_loopback_want: dhcp_loopback_changed = True - want.update({'networkId': have['networkId']}) + want.update({"networkId": have["networkId"]}) create = want - return create, gw_changed, tg_changed, warn_msg, l2only_changed, vn_changed, \ - intdesc_changed, mtu_changed, arpsup_changed, dhcp1_ip_changed, dhcp2_ip_changed, \ - dhcp3_ip_changed, dhcp1_vrf_changed, dhcp2_vrf_changed, dhcp3_vrf_changed, dhcp_loopback_changed + return ( + create, + gw_changed, + tg_changed, + warn_msg, + l2only_changed, + vn_changed, + intdesc_changed, + mtu_changed, + arpsup_changed, + dhcp1_ip_changed, + dhcp2_ip_changed, + dhcp3_ip_changed, + dhcp1_vrf_changed, + dhcp2_vrf_changed, + dhcp3_vrf_changed, + dhcp_loopback_changed, + ) def update_create_params(self, net): if not net: return net - state = self.params['state'] + state = self.params["state"] - n_template = net.get('net_template', 'Default_Network_Universal') - ne_template = net.get('net_extension_template', 'Default_Network_Extension_Universal') + n_template = net.get("net_template", "Default_Network_Universal") + ne_template = net.get( + "net_extension_template", "Default_Network_Extension_Universal" + ) - if state == 'deleted': + if state == "deleted": net_upd = { - 'fabric': self.fabric, - 'networkName': net['net_name'], - 'networkId': net.get('net_id', None), # Network id will be auto generated in get_diff_merge() - 'networkTemplate': n_template, - 'networkExtensionTemplate': ne_template, + "fabric": self.fabric, + "networkName": net["net_name"], + "networkId": net.get( + "net_id", None + ), # Network id will be auto generated in get_diff_merge() + "networkTemplate": n_template, + "networkExtensionTemplate": ne_template, } else: net_upd = { - 'fabric': self.fabric, - 'vrf': net['vrf_name'], - 'networkName': net['net_name'], - 'networkId': net.get('net_id', None), # Network id will be auto generated in get_diff_merge() - 
'networkTemplate': n_template, - 'networkExtensionTemplate': ne_template, + "fabric": self.fabric, + "vrf": net["vrf_name"], + "networkName": net["net_name"], + "networkId": net.get( + "net_id", None + ), # Network id will be auto generated in get_diff_merge() + "networkTemplate": n_template, + "networkExtensionTemplate": ne_template, } template_conf = { - 'vlanId': net.get('vlan_id'), - 'gatewayIpAddress': net.get('gw_ip_subnet', ""), - 'isLayer2Only': net.get('is_l2only', False), - 'tag': net.get('routing_tag'), - 'vlanName': net.get('vlan_name', ""), - 'intfDescription': net.get('int_desc', ""), - 'mtu': net.get('mtu_l3intf', ""), - 'suppressArp': net.get('arp_suppress', False), - 'dhcpServerAddr1': net.get('dhcp_srvr1_ip', ""), - 'dhcpServerAddr2': net.get('dhcp_srvr2_ip', ""), - 'dhcpServerAddr3': net.get('dhcp_srvr3_ip', ""), - 'vrfDhcp': net.get('dhcp_srvr1_vrf', ""), - 'vrfDhcp2': net.get('dhcp_srvr2_vrf', ""), - 'vrfDhcp3': net.get('dhcp_srvr3_vrf', ""), - 'loopbackId': net.get('dhcp_loopback_id', "") + "vlanId": net.get("vlan_id"), + "gatewayIpAddress": net.get("gw_ip_subnet", ""), + "isLayer2Only": net.get("is_l2only", False), + "tag": net.get("routing_tag"), + "vlanName": net.get("vlan_name", ""), + "intfDescription": net.get("int_desc", ""), + "mtu": net.get("mtu_l3intf", ""), + "suppressArp": net.get("arp_suppress", False), + "dhcpServerAddr1": net.get("dhcp_srvr1_ip", ""), + "dhcpServerAddr2": net.get("dhcp_srvr2_ip", ""), + "dhcpServerAddr3": net.get("dhcp_srvr3_ip", ""), + "vrfDhcp": net.get("dhcp_srvr1_vrf", ""), + "vrfDhcp2": net.get("dhcp_srvr2_vrf", ""), + "vrfDhcp3": net.get("dhcp_srvr3_vrf", ""), + "loopbackId": net.get("dhcp_loopback_id", ""), } - if template_conf['vlanId'] is None: - template_conf['vlanId'] = "" - if template_conf['tag'] is None: - template_conf['tag'] = "" - if template_conf['vlanName'] is None: - template_conf['vlanName'] = "" - if template_conf['intfDescription'] is None: - template_conf['intfDescription'] = "" - if 
template_conf['mtu'] is None: - template_conf['mtu'] = "" - if template_conf['vrfDhcp'] is None: - template_conf['vrfDhcp'] = "" - if template_conf['vrfDhcp2'] is None: - template_conf['vrfDhcp2'] = "" - if template_conf['vrfDhcp3'] is None: - template_conf['vrfDhcp3'] = "" - if template_conf['loopbackId'] is None: - template_conf['loopbackId'] = "" - - net_upd.update({'networkTemplateConfig': json.dumps(template_conf)}) + if template_conf["vlanId"] is None: + template_conf["vlanId"] = "" + if template_conf["tag"] is None: + template_conf["tag"] = "" + if template_conf["vlanName"] is None: + template_conf["vlanName"] = "" + if template_conf["intfDescription"] is None: + template_conf["intfDescription"] = "" + if template_conf["mtu"] is None: + template_conf["mtu"] = "" + if template_conf["vrfDhcp"] is None: + template_conf["vrfDhcp"] = "" + if template_conf["vrfDhcp2"] is None: + template_conf["vrfDhcp2"] = "" + if template_conf["vrfDhcp3"] is None: + template_conf["vrfDhcp3"] = "" + if template_conf["loopbackId"] is None: + template_conf["loopbackId"] = "" + + net_upd.update({"networkTemplateConfig": json.dumps(template_conf)}) return net_upd @@ -844,181 +932,196 @@ def get_have(self): l2only_configured = False - state = self.params['state'] + state = self.params["state"] - method = 'GET' + method = "GET" path = self.paths["GET_VRF"].format(self.fabric) vrf_objects = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(vrf_objects, 'query_dcnm') + missing_fabric, not_ok = self.handle_response(vrf_objects, "query_dcnm") if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Unable to find VRFs under fabric: {}".format(self.fabric) + msg1 = "Fabric {0} not present on DCNM".format(self.fabric) + msg2 = "Unable to find VRFs under fabric: {0}".format(self.fabric) self.module.fail_json(msg=msg1 if missing_fabric else msg2) return - if not state == 'deleted' and not state == 'query': + if not 
state == "deleted" and not state == "query": if self.config: for net in self.config: vrf_found = False - vrf_missing = net.get('vrf_name', 'NA') - if (vrf_missing == 'NA' or vrf_missing == "") and net.get('is_l2only', False) is True: + vrf_missing = net.get("vrf_name", "NA") + if (vrf_missing == "NA" or vrf_missing == "") and net.get( + "is_l2only", False + ) is True: # set vrf_missing to NA again as it can be "" vrf_missing = "NA" vrf_found = True l2only_configured = True continue - if vrf_objects['DATA']: - for vrf in vrf_objects['DATA']: - if vrf_missing == vrf['vrfName']: + if vrf_objects["DATA"]: + for vrf in vrf_objects["DATA"]: + if vrf_missing == vrf["vrfName"]: vrf_found = True break if not vrf_found: - self.module.fail_json(msg="VRF: {} is missing in fabric: {}".format(vrf_missing, self.fabric)) + self.module.fail_json( + msg="VRF: {0} is missing in fabric: {1}".format( + vrf_missing, self.fabric + ) + ) - for vrf in vrf_objects['DATA']: + for vrf in vrf_objects["DATA"]: - path = self.paths["GET_VRF_NET"].format(self.fabric, vrf['vrfName']) + path = self.paths["GET_VRF_NET"].format(self.fabric, vrf["vrfName"]) networks_per_vrf = dcnm_send(self.module, method, path) - if not networks_per_vrf['DATA']: + if not networks_per_vrf["DATA"]: continue - for net in networks_per_vrf['DATA']: - json_to_dict = json.loads(net['networkTemplateConfig']) + for net in networks_per_vrf["DATA"]: + json_to_dict = json.loads(net["networkTemplateConfig"]) t_conf = { - 'vlanId': json_to_dict.get('vlanId', ""), - 'gatewayIpAddress': json_to_dict.get('gatewayIpAddress', ""), - 'isLayer2Only': json_to_dict.get('isLayer2Only', False), - 'tag': json_to_dict.get('tag', ""), - 'vlanName': json_to_dict.get('vlanName', ""), - 'intfDescription': json_to_dict.get('intfDescription', ""), - 'mtu': json_to_dict.get('mtu', ""), - 'suppressArp': json_to_dict.get('suppressArp', False), - 'dhcpServerAddr1': json_to_dict.get('dhcpServerAddr1', ""), - 'dhcpServerAddr2': 
json_to_dict.get('dhcpServerAddr2', ""), - 'dhcpServerAddr3': json_to_dict.get('dhcpServerAddr3', ""), - 'vrfDhcp': json_to_dict.get('vrfDhcp', ""), - 'vrfDhcp2': json_to_dict.get('vrfDhcp2', ""), - 'vrfDhcp3': json_to_dict.get('vrfDhcp3', ""), - 'loopbackId': json_to_dict.get('loopbackId', "") + "vlanId": json_to_dict.get("vlanId", ""), + "gatewayIpAddress": json_to_dict.get("gatewayIpAddress", ""), + "isLayer2Only": json_to_dict.get("isLayer2Only", False), + "tag": json_to_dict.get("tag", ""), + "vlanName": json_to_dict.get("vlanName", ""), + "intfDescription": json_to_dict.get("intfDescription", ""), + "mtu": json_to_dict.get("mtu", ""), + "suppressArp": json_to_dict.get("suppressArp", False), + "dhcpServerAddr1": json_to_dict.get("dhcpServerAddr1", ""), + "dhcpServerAddr2": json_to_dict.get("dhcpServerAddr2", ""), + "dhcpServerAddr3": json_to_dict.get("dhcpServerAddr3", ""), + "vrfDhcp": json_to_dict.get("vrfDhcp", ""), + "vrfDhcp2": json_to_dict.get("vrfDhcp2", ""), + "vrfDhcp3": json_to_dict.get("vrfDhcp3", ""), + "loopbackId": json_to_dict.get("loopbackId", ""), } - net.update({'networkTemplateConfig': json.dumps(t_conf)}) - del net['displayName'] - del net['serviceNetworkTemplate'] - del net['source'] + net.update({"networkTemplateConfig": json.dumps(t_conf)}) + del net["displayName"] + del net["serviceNetworkTemplate"] + del net["source"] - curr_networks.append(net['networkName']) + curr_networks.append(net["networkName"]) have_create.append(net) - if l2only_configured is True or state == 'deleted': + if l2only_configured is True or state == "deleted": path = self.paths["GET_VRF_NET"].format(self.fabric, "NA") networks_per_navrf = dcnm_send(self.module, method, path) - if networks_per_navrf.get('DATA'): - for l2net in networks_per_navrf['DATA']: - json_to_dict = json.loads(l2net['networkTemplateConfig']) + if networks_per_navrf.get("DATA"): + for l2net in networks_per_navrf["DATA"]: + json_to_dict = json.loads(l2net["networkTemplateConfig"]) t_conf = { - 
'vlanId': json_to_dict.get('vlanId', ""), - 'gatewayIpAddress': json_to_dict.get('gatewayIpAddress', ""), - 'isLayer2Only': json_to_dict.get('isLayer2Only', False), - 'tag': json_to_dict.get('tag', ""), - 'vlanName': json_to_dict.get('vlanName', ""), - 'intfDescription': json_to_dict.get('intfDescription', ""), - 'mtu': json_to_dict.get('mtu', ""), - 'suppressArp': json_to_dict.get('suppressArp', False), - 'dhcpServerAddr1': json_to_dict.get('dhcpServerAddr1', ""), - 'dhcpServerAddr2': json_to_dict.get('dhcpServerAddr2', ""), - 'dhcpServerAddr3': json_to_dict.get('dhcpServerAddr3', ""), - 'vrfDhcp': json_to_dict.get('vrfDhcp', ""), - 'vrfDhcp2': json_to_dict.get('vrfDhcp2', ""), - 'vrfDhcp3': json_to_dict.get('vrfDhcp3', ""), - 'loopbackId': json_to_dict.get('loopbackId', "") + "vlanId": json_to_dict.get("vlanId", ""), + "gatewayIpAddress": json_to_dict.get("gatewayIpAddress", ""), + "isLayer2Only": json_to_dict.get("isLayer2Only", False), + "tag": json_to_dict.get("tag", ""), + "vlanName": json_to_dict.get("vlanName", ""), + "intfDescription": json_to_dict.get("intfDescription", ""), + "mtu": json_to_dict.get("mtu", ""), + "suppressArp": json_to_dict.get("suppressArp", False), + "dhcpServerAddr1": json_to_dict.get("dhcpServerAddr1", ""), + "dhcpServerAddr2": json_to_dict.get("dhcpServerAddr2", ""), + "dhcpServerAddr3": json_to_dict.get("dhcpServerAddr3", ""), + "vrfDhcp": json_to_dict.get("vrfDhcp", ""), + "vrfDhcp2": json_to_dict.get("vrfDhcp2", ""), + "vrfDhcp3": json_to_dict.get("vrfDhcp3", ""), + "loopbackId": json_to_dict.get("loopbackId", ""), } - l2net.update({'networkTemplateConfig': json.dumps(t_conf)}) - del l2net['displayName'] - del l2net['serviceNetworkTemplate'] - del l2net['source'] + l2net.update({"networkTemplateConfig": json.dumps(t_conf)}) + del l2net["displayName"] + del l2net["serviceNetworkTemplate"] + del l2net["source"] - curr_networks.append(l2net['networkName']) + curr_networks.append(l2net["networkName"]) have_create.append(l2net) if not 
curr_networks: return - net_attach_objects = dcnm_get_url(self.module, self.fabric, self.paths["GET_NET_ATTACH"], ','.join(curr_networks), "networks") + net_attach_objects = dcnm_get_url( + self.module, + self.fabric, + self.paths["GET_NET_ATTACH"], + ",".join(curr_networks), + "networks", + ) - if not net_attach_objects['DATA']: + if not net_attach_objects["DATA"]: return - for net_attach in net_attach_objects['DATA']: - if not net_attach.get('lanAttachList'): + for net_attach in net_attach_objects["DATA"]: + if not net_attach.get("lanAttachList"): continue - attach_list = net_attach['lanAttachList'] - dep_net = '' + attach_list = net_attach["lanAttachList"] + dep_net = "" for attach in attach_list: - attach_state = False if attach['lanAttachState'] == "NA" else True - deploy = attach['isLanAttached'] - if bool(deploy) and (attach['lanAttachState'] == "OUT-OF-SYNC" or attach['lanAttachState'] == "PENDING"): + attach_state = False if attach["lanAttachState"] == "NA" else True + deploy = attach["isLanAttached"] + if bool(deploy) and ( + attach["lanAttachState"] == "OUT-OF-SYNC" + or attach["lanAttachState"] == "PENDING" + ): deploy = False if bool(deploy): - dep_net = attach['networkName'] + dep_net = attach["networkName"] - sn = attach['switchSerialNo'] - vlan = attach['vlanId'] - ports = attach['portNames'] + sn = attach["switchSerialNo"] + vlan = attach["vlanId"] + ports = attach["portNames"] # The deletes and updates below are done to update the incoming dictionary format to # match to what the outgoing payload requirements mandate. # Ex: 'vlanId' in the attach section of incoming payload needs to be changed to 'vlan' # on the attach section of outgoing payload. 
- del attach['vlanId'] - del attach['switchSerialNo'] - del attach['switchName'] - del attach['switchRole'] - del attach['ipAddress'] - del attach['lanAttachState'] - del attach['isLanAttached'] - del attach['fabricName'] - del attach['portNames'] - del attach['switchDbId'] - del attach['networkId'] - - if 'displayName' in attach.keys(): - del attach['displayName'] - if 'interfaceGroups' in attach.keys(): - del attach['interfaceGroups'] - - attach.update({'fabric': self.fabric}) - attach.update({'vlan': vlan}) - attach.update({'serialNumber': sn}) - attach.update({'deployment': deploy}) - attach.update({'extensionValues': ""}) - attach.update({'instanceValues': ""}) - attach.update({'freeformConfig': ""}) - attach.update({'isAttached': attach_state}) - attach.update({'dot1QVlan': 0}) - attach.update({'detachSwitchPorts': ""}) - attach.update({'switchPorts': ports}) - attach.update({'untagged': False}) + del attach["vlanId"] + del attach["switchSerialNo"] + del attach["switchName"] + del attach["switchRole"] + del attach["ipAddress"] + del attach["lanAttachState"] + del attach["isLanAttached"] + del attach["fabricName"] + del attach["portNames"] + del attach["switchDbId"] + del attach["networkId"] + + if "displayName" in attach.keys(): + del attach["displayName"] + if "interfaceGroups" in attach.keys(): + del attach["interfaceGroups"] + + attach.update({"fabric": self.fabric}) + attach.update({"vlan": vlan}) + attach.update({"serialNumber": sn}) + attach.update({"deployment": deploy}) + attach.update({"extensionValues": ""}) + attach.update({"instanceValues": ""}) + attach.update({"freeformConfig": ""}) + attach.update({"isAttached": attach_state}) + attach.update({"dot1QVlan": 0}) + attach.update({"detachSwitchPorts": ""}) + attach.update({"switchPorts": ports}) + attach.update({"untagged": False}) if dep_net: dep_networks.append(dep_net) - have_attach = net_attach_objects['DATA'] + have_attach = net_attach_objects["DATA"] if dep_networks: - 
have_deploy.update({'networkNames': ','.join(dep_networks)}) + have_deploy.update({"networkNames": ",".join(dep_networks)}) self.have_create = have_create self.have_attach = have_attach @@ -1039,26 +1142,26 @@ def get_want(self): net_attach = {} networks = [] - net_deploy = net.get('deploy', True) + net_deploy = net.get("deploy", True) want_create.append(self.update_create_params(net)) - if not net.get('attach'): + if not net.get("attach"): continue - for attach in net['attach']: - deploy = net_deploy if "deploy" not in attach else attach['deploy'] - networks.append(self.update_attach_params(attach, - net['net_name'], - deploy)) + for attach in net["attach"]: + deploy = net_deploy if "deploy" not in attach else attach["deploy"] + networks.append( + self.update_attach_params(attach, net["net_name"], deploy) + ) if networks: - net_attach.update({'networkName': net['net_name']}) - net_attach.update({'lanAttachList': networks}) + net_attach.update({"networkName": net["net_name"]}) + net_attach.update({"lanAttachList": networks}) want_attach.append(net_attach) - all_networks += net['net_name'] + "," + all_networks += net["net_name"] + "," if all_networks: - want_deploy.update({'networkNames': all_networks[:-1]}) + want_deploy.update({"networkNames": all_networks[:-1]}) self.want_create = want_create self.want_attach = want_attach @@ -1070,51 +1173,65 @@ def get_diff_delete(self): diff_undeploy = {} diff_delete = {} - all_nets = '' + all_nets = "" if self.config: for want_c in self.want_create: - if not next((have_c for have_c in self.have_create if have_c['networkName'] == want_c['networkName']), None): + if not next( + ( + have_c + for have_c in self.have_create + if have_c["networkName"] == want_c["networkName"] + ), + None, + ): continue - diff_delete.update({want_c['networkName']: 'DEPLOYED'}) - - have_a = next((attach for attach in self.have_attach if attach['networkName'] == want_c['networkName']), None) + diff_delete.update({want_c["networkName"]: "DEPLOYED"}) + 
+ have_a = next( + ( + attach + for attach in self.have_attach + if attach["networkName"] == want_c["networkName"] + ), + None, + ) if not have_a: continue to_del = [] - atch_h = have_a['lanAttachList'] + atch_h = have_a["lanAttachList"] for a_h in atch_h: - if a_h['isAttached']: - del a_h['isAttached'] - a_h.update({'deployment': False}) + if a_h["isAttached"]: + del a_h["isAttached"] + a_h.update({"deployment": False}) to_del.append(a_h) if to_del: - have_a.update({'lanAttachList': to_del}) + have_a.update({"lanAttachList": to_del}) diff_detach.append(have_a) - all_nets += have_a['networkName'] + "," + all_nets += have_a["networkName"] + "," if all_nets: - diff_undeploy.update({'networkNames': all_nets[:-1]}) + diff_undeploy.update({"networkNames": all_nets[:-1]}) else: for have_a in self.have_attach: to_del = [] - atch_h = have_a['lanAttachList'] + atch_h = have_a["lanAttachList"] for a_h in atch_h: - if a_h['isAttached']: - del a_h['isAttached'] - a_h.update({'deployment': False}) + if a_h["isAttached"]: + del a_h["isAttached"] + a_h.update({"deployment": False}) to_del.append(a_h) if to_del: - have_a.update({'lanAttachList': to_del}) + have_a.update({"lanAttachList": to_del}) diff_detach.append(have_a) - all_nets += have_a['networkName'] + "," + all_nets += have_a["networkName"] + "," - diff_delete.update({have_a['networkName']: 'DEPLOYED'}) + diff_delete.update({have_a["networkName"]: "DEPLOYED"}) if all_nets: - diff_undeploy.update({'networkNames': all_nets[:-1]}) + diff_undeploy.update({"networkNames": all_nets[:-1]}) self.diff_detach = diff_detach self.diff_undeploy = diff_undeploy @@ -1122,7 +1239,7 @@ def get_diff_delete(self): def get_diff_override(self): - all_nets = '' + all_nets = "" diff_delete = {} warn_msg = self.get_diff_replace() @@ -1138,28 +1255,35 @@ def get_diff_override(self): # The "if not found" block will go through all attachments under those networks and update them so that # they will be detached and also the network name will be 
added to delete payload. - found = next((net for net in self.want_create if net['networkName'] == have_a['networkName']), None) + found = next( + ( + net + for net in self.want_create + if net["networkName"] == have_a["networkName"] + ), + None, + ) to_del = [] if not found: - atch_h = have_a['lanAttachList'] + atch_h = have_a["lanAttachList"] for a_h in atch_h: - if a_h['isAttached']: - del a_h['isAttached'] - a_h.update({'deployment': False}) + if a_h["isAttached"]: + del a_h["isAttached"] + a_h.update({"deployment": False}) to_del.append(a_h) if to_del: - have_a.update({'lanAttachList': to_del}) + have_a.update({"lanAttachList": to_del}) diff_detach.append(have_a) - all_nets += have_a['networkName'] + "," + all_nets += have_a["networkName"] + "," # The following is added just to help in deletion, we need to wait for detach transaction to complete # before attempting to delete the network. - diff_delete.update({have_a['networkName']: 'DEPLOYED'}) + diff_delete.update({have_a["networkName"]: "DEPLOYED"}) if all_nets: - diff_undeploy.update({'networkNames': all_nets[:-1]}) + diff_undeploy.update({"networkNames": all_nets[:-1]}) self.diff_create = diff_create self.diff_attach = diff_attach @@ -1171,7 +1295,7 @@ def get_diff_override(self): def get_diff_replace(self): - all_nets = '' + all_nets = "" warn_msg = self.get_diff_merge(replace=True) diff_create = self.diff_create @@ -1185,25 +1309,25 @@ def get_diff_replace(self): # This block will take care of deleting any attachments that are present only on DCNM # but, not on the playbook. In this case, the playbook will have a network and few attaches under it, # but, the attaches may be different to what the DCNM has for the same network. 
- if have_a['networkName'] == want_a['networkName']: + if have_a["networkName"] == want_a["networkName"]: h_in_w = True - atch_h = have_a['lanAttachList'] - atch_w = want_a.get('lanAttachList') + atch_h = have_a["lanAttachList"] + atch_w = want_a.get("lanAttachList") for a_h in atch_h: - if not a_h['isAttached']: + if not a_h["isAttached"]: continue a_match = False if atch_w: for a_w in atch_w: - if a_h['serialNumber'] == a_w['serialNumber']: + if a_h["serialNumber"] == a_w["serialNumber"]: # Have is already in diff, no need to continue looking for it. a_match = True break if not a_match: - del a_h['isAttached'] - a_h.update({'deployment': False}) + del a_h["isAttached"] + a_h.update({"deployment": False}) r_net_list.append(a_h) break @@ -1211,31 +1335,38 @@ def get_diff_replace(self): # This block will take care of deleting all the attachments which are in DCNM but # are not mentioned in the playbook. The playbook just has the network, but, does not have any attach # under it. - found = next((net for net in self.want_create if net['networkName'] == have_a['networkName']), None) + found = next( + ( + net + for net in self.want_create + if net["networkName"] == have_a["networkName"] + ), + None, + ) if found: - atch_h = have_a['lanAttachList'] + atch_h = have_a["lanAttachList"] for a_h in atch_h: - if not a_h['isAttached']: + if not a_h["isAttached"]: continue - del a_h['isAttached'] - a_h.update({'deployment': False}) + del a_h["isAttached"] + a_h.update({"deployment": False}) r_net_list.append(a_h) if r_net_list: in_diff = False for d_attach in self.diff_attach: - if have_a['networkName'] == d_attach['networkName']: + if have_a["networkName"] == d_attach["networkName"]: in_diff = True - d_attach['lanAttachList'].extend(r_net_list) + d_attach["lanAttachList"].extend(r_net_list) break if not in_diff: r_net_dict = { - 'networkName': have_a['networkName'], - 'lanAttachList': r_net_list + "networkName": have_a["networkName"], + "lanAttachList": r_net_list, } 
diff_attach.append(r_net_dict) - all_nets += have_a['networkName'] + "," + all_nets += have_a["networkName"] + "," if not all_nets: self.diff_create = diff_create @@ -1244,10 +1375,10 @@ def get_diff_replace(self): return warn_msg if not self.diff_deploy: - diff_deploy.update({'networkNames': all_nets[:-1]}) + diff_deploy.update({"networkNames": all_nets[:-1]}) else: - nets = self.diff_deploy['networkNames'] + "," + all_nets[:-1] - diff_deploy.update({'networkNames': nets}) + nets = self.diff_deploy["networkNames"] + "," + all_nets[:-1] + diff_deploy.update({"networkNames": nets}) self.diff_create = diff_create self.diff_attach = diff_attach @@ -1293,75 +1424,104 @@ def get_diff_merge(self, replace=False): for want_c in self.want_create: found = False for have_c in self.have_create: - if want_c['networkName'] == have_c['networkName']: + if want_c["networkName"] == have_c["networkName"]: found = True - diff, gw_chg, tg_chg, warn_msg, l2only_chg, vn_chg, idesc_chg, mtu_chg, \ - arpsup_chg, dhcp1_ip_chg, dhcp2_ip_chg, dhcp3_ip_chg, dhcp1_vrf_chg, \ - dhcp2_vrf_chg, dhcp3_vrf_chg, dhcp_loopbk_chg = self.diff_for_create(want_c, have_c) - gw_changed.update({want_c['networkName']: gw_chg}) - tg_changed.update({want_c['networkName']: tg_chg}) - l2only_changed.update({want_c['networkName']: l2only_chg}) - vn_changed.update({want_c['networkName']: vn_chg}) - intdesc_changed.update({want_c['networkName']: idesc_chg}) - mtu_changed.update({want_c['networkName']: mtu_chg}) - arpsup_changed.update({want_c['networkName']: arpsup_chg}) - dhcp1_ip_changed.update({want_c['networkName']: dhcp1_ip_chg}) - dhcp2_ip_changed.update({want_c['networkName']: dhcp2_ip_chg}) - dhcp3_ip_changed.update({want_c['networkName']: dhcp3_ip_chg}) - dhcp1_vrf_changed.update({want_c['networkName']: dhcp1_vrf_chg}) - dhcp2_vrf_changed.update({want_c['networkName']: dhcp2_vrf_chg}) - dhcp3_vrf_changed.update({want_c['networkName']: dhcp3_vrf_chg}) - dhcp_loopback_changed.update({want_c['networkName']: 
dhcp_loopbk_chg}) + ( + diff, + gw_chg, + tg_chg, + warn_msg, + l2only_chg, + vn_chg, + idesc_chg, + mtu_chg, + arpsup_chg, + dhcp1_ip_chg, + dhcp2_ip_chg, + dhcp3_ip_chg, + dhcp1_vrf_chg, + dhcp2_vrf_chg, + dhcp3_vrf_chg, + dhcp_loopbk_chg, + ) = self.diff_for_create(want_c, have_c) + gw_changed.update({want_c["networkName"]: gw_chg}) + tg_changed.update({want_c["networkName"]: tg_chg}) + l2only_changed.update({want_c["networkName"]: l2only_chg}) + vn_changed.update({want_c["networkName"]: vn_chg}) + intdesc_changed.update({want_c["networkName"]: idesc_chg}) + mtu_changed.update({want_c["networkName"]: mtu_chg}) + arpsup_changed.update({want_c["networkName"]: arpsup_chg}) + dhcp1_ip_changed.update({want_c["networkName"]: dhcp1_ip_chg}) + dhcp2_ip_changed.update({want_c["networkName"]: dhcp2_ip_chg}) + dhcp3_ip_changed.update({want_c["networkName"]: dhcp3_ip_chg}) + dhcp1_vrf_changed.update({want_c["networkName"]: dhcp1_vrf_chg}) + dhcp2_vrf_changed.update({want_c["networkName"]: dhcp2_vrf_chg}) + dhcp3_vrf_changed.update({want_c["networkName"]: dhcp3_vrf_chg}) + dhcp_loopback_changed.update( + {want_c["networkName"]: dhcp_loopbk_chg} + ) if diff: diff_create_update.append(diff) break if not found: - net_id = want_c.get('networkId', None) + net_id = want_c.get("networkId", None) if not net_id: # networkId(VNI-id) is not provided by user. # Need to query DCNM to fetch next available networkId and use it here. 
- method = 'POST' + method = "POST" attempt = 0 - while True and attempt < 10: + while attempt < 10: attempt += 1 path = self.paths["GET_NET_ID"].format(self.fabric) if self.dcnm_version > 11: - net_id_obj = dcnm_send(self.module, 'GET', path) + net_id_obj = dcnm_send(self.module, "GET", path) else: net_id_obj = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(net_id_obj, 'query_dcnm') + missing_fabric, not_ok = self.handle_response( + net_id_obj, "query_dcnm" + ) if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Unable to generate networkId for network: {} " \ - "under fabric: {}".format(want_c['networkName'], self.fabric) + msg1 = "Fabric {0} not present on DCNM".format(self.fabric) + msg2 = ( + "Unable to generate networkId for network: {0} " + "under fabric: {1}".format( + want_c["networkName"], self.fabric + ) + ) self.module.fail_json(msg=msg1 if missing_fabric else msg2) - if not net_id_obj['DATA']: + if not net_id_obj["DATA"]: continue if self.dcnm_version == 11: - net_id = net_id_obj['DATA'].get('segmentId') + net_id = net_id_obj["DATA"].get("segmentId") elif self.dcnm_version >= 12: - net_id = net_id_obj['DATA'].get('l2vni') + net_id = net_id_obj["DATA"].get("l2vni") else: - msg = "Unsupported DCNM version: version {}".format(self.dcnm_version) + msg = "Unsupported DCNM version: version {0}".format( + self.dcnm_version + ) self.module.fail_json(msg) if net_id != prev_net_id_fetched: - want_c.update({'networkId': net_id}) + want_c.update({"networkId": net_id}) prev_net_id_fetched = net_id break if not net_id: - self.module.fail_json(msg="Unable to generate networkId for network: {} " - "under fabric: {}".format(want_c['networkName'], self.fabric)) + self.module.fail_json( + msg="Unable to generate networkId for network: {0} " + "under fabric: {1}".format( + want_c["networkName"], self.fabric + ) + ) create_path = self.paths["GET_NET"].format(self.fabric) 
diff_create_quick.append(want_c) @@ -1369,9 +1529,11 @@ def get_diff_merge(self, replace=False): if self.module.check_mode: continue - resp = dcnm_send(self.module, method, create_path, json.dumps(want_c)) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "create") + resp = dcnm_send( + self.module, method, create_path, json.dumps(want_c) + ) + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "create") if fail: self.failure(resp) @@ -1380,56 +1542,60 @@ def get_diff_merge(self, replace=False): all_nets = [] for want_a in self.want_attach: - dep_net = '' + dep_net = "" found = False for have_a in self.have_attach: - if want_a['networkName'] == have_a['networkName']: + if want_a["networkName"] == have_a["networkName"]: found = True - diff, net = self.diff_for_attach_deploy(want_a['lanAttachList'], have_a['lanAttachList'], - replace) + diff, net = self.diff_for_attach_deploy( + want_a["lanAttachList"], have_a["lanAttachList"], replace + ) if diff: base = want_a.copy() - del base['lanAttachList'] - base.update({'lanAttachList': diff}) + del base["lanAttachList"] + base.update({"lanAttachList": diff}) diff_attach.append(base) - dep_net = want_a['networkName'] + dep_net = want_a["networkName"] else: - if net or gw_changed.get(want_a['networkName'], False) or \ - tg_changed.get(want_a['networkName'], False) or \ - l2only_changed.get(want_a['networkName'], False) or \ - vn_changed.get(want_a['networkName'], False) or \ - intdesc_changed.get(want_a['networkName'], False) or \ - mtu_changed.get(want_a['networkName'], False) or \ - arpsup_changed.get(want_a['networkName'], False) or \ - dhcp1_ip_changed.get(want_a['networkName'], False) or \ - dhcp2_ip_changed.get(want_a['networkName'], False) or \ - dhcp3_ip_changed.get(want_a['networkName'], False) or \ - dhcp1_vrf_changed.get(want_a['networkName'], False) or \ - dhcp2_vrf_changed.get(want_a['networkName'], False) or \ - 
dhcp3_vrf_changed.get(want_a['networkName'], False) or \ - dhcp_loopback_changed.get(want_a['networkName'], False): - dep_net = want_a['networkName'] - - if not found and want_a.get('lanAttachList'): + if ( + net + or gw_changed.get(want_a["networkName"], False) + or tg_changed.get(want_a["networkName"], False) + or l2only_changed.get(want_a["networkName"], False) + or vn_changed.get(want_a["networkName"], False) + or intdesc_changed.get(want_a["networkName"], False) + or mtu_changed.get(want_a["networkName"], False) + or arpsup_changed.get(want_a["networkName"], False) + or dhcp1_ip_changed.get(want_a["networkName"], False) + or dhcp2_ip_changed.get(want_a["networkName"], False) + or dhcp3_ip_changed.get(want_a["networkName"], False) + or dhcp1_vrf_changed.get(want_a["networkName"], False) + or dhcp2_vrf_changed.get(want_a["networkName"], False) + or dhcp3_vrf_changed.get(want_a["networkName"], False) + or dhcp_loopback_changed.get(want_a["networkName"], False) + ): + dep_net = want_a["networkName"] + + if not found and want_a.get("lanAttachList"): atch_list = [] - for attach in want_a['lanAttachList']: - del attach['isAttached'] - if bool(attach['deployment']): + for attach in want_a["lanAttachList"]: + del attach["isAttached"] + if bool(attach["deployment"]): atch_list.append(attach) if atch_list: base = want_a.copy() - del base['lanAttachList'] - base.update({'lanAttachList': atch_list}) + del base["lanAttachList"] + base.update({"lanAttachList": atch_list}) diff_attach.append(base) - dep_net = want_a['networkName'] + dep_net = want_a["networkName"] if dep_net: all_nets.append(dep_net) if all_nets: - diff_deploy.update({'networkNames': ','.join(all_nets)}) + diff_deploy.update({"networkNames": ",".join(all_nets)}) self.diff_create = diff_create self.diff_create_update = diff_create_update @@ -1448,8 +1614,12 @@ def format_diff(self): diff_create_update = copy.deepcopy(self.diff_create_update) diff_attach = copy.deepcopy(self.diff_attach) diff_detach = 
copy.deepcopy(self.diff_detach) - diff_deploy = self.diff_deploy['networkNames'].split(",") if self.diff_deploy else [] - diff_undeploy = self.diff_undeploy['networkNames'].split(",") if self.diff_undeploy else [] + diff_deploy = ( + self.diff_deploy["networkNames"].split(",") if self.diff_deploy else [] + ) + diff_undeploy = ( + self.diff_undeploy["networkNames"].split(",") if self.diff_undeploy else [] + ) diff_create.extend(diff_create_quick) diff_create.extend(diff_create_update) @@ -1458,65 +1628,74 @@ def format_diff(self): for want_d in diff_create: - found_a = next((net for net in diff_attach if net['networkName'] == want_d['networkName']), None) + found_a = next( + ( + net + for net in diff_attach + if net["networkName"] == want_d["networkName"] + ), + None, + ) found_c = want_d - json_to_dict = json.loads(found_c['networkTemplateConfig']) - - found_c.update({'net_name': found_c['networkName']}) - found_c.update({'vrf_name': found_c.get('vrf', "NA")}) - found_c.update({'net_id': found_c['networkId']}) - found_c.update({'vlan_id': json_to_dict.get('vlanId', "")}) - found_c.update({'gw_ip_subnet': json_to_dict.get('gatewayIpAddress', "")}) - found_c.update({'net_template': found_c['networkTemplate']}) - found_c.update({'net_extension_template': found_c['networkExtensionTemplate']}) - found_c.update({'is_l2only': json_to_dict.get('isLayer2Only', False)}) - found_c.update({'vlan_name': json_to_dict.get('vlanName', "")}) - found_c.update({'int_desc': json_to_dict.get('intfDescription', "")}) - found_c.update({'mtu_l3intf': json_to_dict.get('mtu', "")}) - found_c.update({'arp_suppress': json_to_dict.get('suppressArp', False)}) - found_c.update({'dhcp_srvr1_ip': json_to_dict.get('dhcpServerAddr1', "")}) - found_c.update({'dhcp_srvr2_ip': json_to_dict.get('dhcpServerAddr2', "")}) - found_c.update({'dhcp_srvr3_ip': json_to_dict.get('dhcpServerAddr3', "")}) - found_c.update({'dhcp_srvr1_vrf': json_to_dict.get('vrfDhcp', "")}) - found_c.update({'dhcp_srvr2_vrf': 
json_to_dict.get('vrfDhcp2', "")}) - found_c.update({'dhcp_srvr3_vrf': json_to_dict.get('vrfDhcp3', "")}) - found_c.update({'dhcp_loopback_id': json_to_dict.get('loopbackId', "")}) - found_c.update({'attach': []}) - - del found_c['fabric'] - del found_c['networkName'] - del found_c['networkId'] - del found_c['networkTemplate'] - del found_c['networkExtensionTemplate'] - del found_c['networkTemplateConfig'] - del found_c['vrf'] - - if diff_deploy and found_c['net_name'] in diff_deploy: - diff_deploy.remove(found_c['net_name']) + json_to_dict = json.loads(found_c["networkTemplateConfig"]) + + found_c.update({"net_name": found_c["networkName"]}) + found_c.update({"vrf_name": found_c.get("vrf", "NA")}) + found_c.update({"net_id": found_c["networkId"]}) + found_c.update({"vlan_id": json_to_dict.get("vlanId", "")}) + found_c.update({"gw_ip_subnet": json_to_dict.get("gatewayIpAddress", "")}) + found_c.update({"net_template": found_c["networkTemplate"]}) + found_c.update( + {"net_extension_template": found_c["networkExtensionTemplate"]} + ) + found_c.update({"is_l2only": json_to_dict.get("isLayer2Only", False)}) + found_c.update({"vlan_name": json_to_dict.get("vlanName", "")}) + found_c.update({"int_desc": json_to_dict.get("intfDescription", "")}) + found_c.update({"mtu_l3intf": json_to_dict.get("mtu", "")}) + found_c.update({"arp_suppress": json_to_dict.get("suppressArp", False)}) + found_c.update({"dhcp_srvr1_ip": json_to_dict.get("dhcpServerAddr1", "")}) + found_c.update({"dhcp_srvr2_ip": json_to_dict.get("dhcpServerAddr2", "")}) + found_c.update({"dhcp_srvr3_ip": json_to_dict.get("dhcpServerAddr3", "")}) + found_c.update({"dhcp_srvr1_vrf": json_to_dict.get("vrfDhcp", "")}) + found_c.update({"dhcp_srvr2_vrf": json_to_dict.get("vrfDhcp2", "")}) + found_c.update({"dhcp_srvr3_vrf": json_to_dict.get("vrfDhcp3", "")}) + found_c.update({"dhcp_loopback_id": json_to_dict.get("loopbackId", "")}) + found_c.update({"attach": []}) + + del found_c["fabric"] + del 
found_c["networkName"] + del found_c["networkId"] + del found_c["networkTemplate"] + del found_c["networkExtensionTemplate"] + del found_c["networkTemplateConfig"] + del found_c["vrf"] + + if diff_deploy and found_c["net_name"] in diff_deploy: + diff_deploy.remove(found_c["net_name"]) if not found_a: diff.append(found_c) continue - attach = found_a['lanAttachList'] + attach = found_a["lanAttachList"] for a_w in attach: attach_d = {} detach_d = {} for k, v in self.ip_sn.items(): - if v == a_w['serialNumber']: - attach_d.update({'ip_address': k}) + if v == a_w["serialNumber"]: + attach_d.update({"ip_address": k}) break - if a_w['detachSwitchPorts']: - detach_d.update({'ip_address': attach_d['ip_address']}) - detach_d.update({'ports': a_w['detachSwitchPorts']}) - detach_d.update({'deploy': False}) - found_c['attach'].append(detach_d) - attach_d.update({'ports': a_w['switchPorts']}) - attach_d.update({'deploy': a_w['deployment']}) - found_c['attach'].append(attach_d) + if a_w["detachSwitchPorts"]: + detach_d.update({"ip_address": attach_d["ip_address"]}) + detach_d.update({"ports": a_w["detachSwitchPorts"]}) + detach_d.update({"deploy": False}) + found_c["attach"].append(detach_d) + attach_d.update({"ports": a_w["switchPorts"]}) + attach_d.update({"deploy": a_w["deployment"]}) + found_c["attach"].append(attach_d) diff.append(found_c) @@ -1525,50 +1704,50 @@ def format_diff(self): for vrf in diff_attach: new_attach_dict = {} new_attach_list = [] - attach = vrf['lanAttachList'] + attach = vrf["lanAttachList"] for a_w in attach: attach_d = {} detach_d = {} for k, v in self.ip_sn.items(): - if v == a_w['serialNumber']: - attach_d.update({'ip_address': k}) + if v == a_w["serialNumber"]: + attach_d.update({"ip_address": k}) break - if a_w['detachSwitchPorts']: - detach_d.update({'ip_address': attach_d['ip_address']}) - detach_d.update({'ports': a_w['detachSwitchPorts']}) - detach_d.update({'deploy': False}) + if a_w["detachSwitchPorts"]: + detach_d.update({"ip_address": 
attach_d["ip_address"]}) + detach_d.update({"ports": a_w["detachSwitchPorts"]}) + detach_d.update({"deploy": False}) new_attach_list.append(detach_d) - attach_d.update({'ports': a_w['switchPorts']}) - attach_d.update({'deploy': a_w['deployment']}) + attach_d.update({"ports": a_w["switchPorts"]}) + attach_d.update({"deploy": a_w["deployment"]}) new_attach_list.append(attach_d) if new_attach_list: - if diff_deploy and vrf['networkName'] in diff_deploy: - diff_deploy.remove(vrf['networkName']) - new_attach_dict.update({'attach': new_attach_list}) - new_attach_dict.update({'net_name': vrf['networkName']}) + if diff_deploy and vrf["networkName"] in diff_deploy: + diff_deploy.remove(vrf["networkName"]) + new_attach_dict.update({"attach": new_attach_list}) + new_attach_dict.update({"net_name": vrf["networkName"]}) diff.append(new_attach_dict) for net in diff_deploy: - new_deploy_dict = {'net_name': net} + new_deploy_dict = {"net_name": net} diff.append(new_deploy_dict) self.diff_input_format = diff def get_diff_query(self): - method = 'GET' + method = "GET" path = self.paths["GET_VRF"].format(self.fabric) vrf_objects = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(vrf_objects, 'query_dcnm') + missing_fabric, not_ok = self.handle_response(vrf_objects, "query_dcnm") if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Unable to find VRFs under fabric: {}".format(self.fabric) + msg1 = "Fabric {0} not present on DCNM".format(self.fabric) + msg2 = "Unable to find VRFs under fabric: {0}".format(self.fabric) self.module.fail_json(msg=msg1 if missing_fabric else msg2) return @@ -1578,34 +1757,40 @@ def get_diff_query(self): if self.have_create or self.have_attach: for want_c in self.want_create: # Query the Network - item = {'parent': {}, 'attach': []} - path = self.paths["GET_NET_NAME"].format(self.fabric, want_c['networkName']) + item = {"parent": {}, "attach": []} + path = 
self.paths["GET_NET_NAME"].format( + self.fabric, want_c["networkName"] + ) network = dcnm_send(self.module, method, path) - if not network['DATA']: + if not network["DATA"]: continue - net = network['DATA'] - if (want_c['networkName'] == net['networkName']): - item['parent'] = net - item['parent']['networkTemplateConfig'] = json.loads(net['networkTemplateConfig']) + net = network["DATA"] + if want_c["networkName"] == net["networkName"]: + item["parent"] = net + item["parent"]["networkTemplateConfig"] = json.loads( + net["networkTemplateConfig"] + ) # Query the Attachment for the found Networks - path = self.paths["GET_NET_ATTACH"].format(self.fabric, want_c['networkName']) + path = self.paths["GET_NET_ATTACH"].format( + self.fabric, want_c["networkName"] + ) net_attach_objects = dcnm_send(self.module, method, path) - if not net_attach_objects['DATA']: + if not net_attach_objects["DATA"]: return - for net_attach in net_attach_objects['DATA']: - if want_c['networkName'] == net_attach['networkName']: - if not net_attach.get('lanAttachList'): + for net_attach in net_attach_objects["DATA"]: + if want_c["networkName"] == net_attach["networkName"]: + if not net_attach.get("lanAttachList"): continue - attach_list = net_attach['lanAttachList'] + attach_list = net_attach["lanAttachList"] for attach in attach_list: # append the attach network details - item['attach'].append(attach) + item["attach"].append(attach) query.append(item) else: @@ -1613,37 +1798,41 @@ def get_diff_query(self): path = self.paths["GET_NET"].format(self.fabric) networks = dcnm_send(self.module, method, path) - if not networks['DATA']: + if not networks["DATA"]: return - for net in networks['DATA']: - item = {'parent': {}, 'attach': []} + for net in networks["DATA"]: + item = {"parent": {}, "attach": []} # append the parent network details - item['parent'] = net - item['parent']['networkTemplateConfig'] = json.loads(net['networkTemplateConfig']) + item["parent"] = net + 
item["parent"]["networkTemplateConfig"] = json.loads( + net["networkTemplateConfig"] + ) # fetch the attachment for the network - path = self.paths["GET_NET_ATTACH"].format(self.fabric, net['networkName']) + path = self.paths["GET_NET_ATTACH"].format( + self.fabric, net["networkName"] + ) net_attach_objects = dcnm_send(self.module, method, path) - if not net_attach_objects['DATA']: + if not net_attach_objects["DATA"]: return - for net_attach in net_attach_objects['DATA']: - if not net_attach.get('lanAttachList'): + for net_attach in net_attach_objects["DATA"]: + if not net_attach.get("lanAttachList"): continue - attach_list = net_attach['lanAttachList'] + attach_list = net_attach["lanAttachList"] for attach in attach_list: # append the attach network details - item['attach'].append(attach) + item["attach"].append(attach) query.append(item) self.query = query def wait_for_del_ready(self): - method = 'GET' + method = "GET" if self.diff_delete: for net in self.diff_delete: state = False @@ -1651,18 +1840,21 @@ def wait_for_del_ready(self): while not state: resp = dcnm_send(self.module, method, path) state = True - if resp['DATA']: - attach_list = resp['DATA'][0]['lanAttachList'] + if resp["DATA"]: + attach_list = resp["DATA"][0]["lanAttachList"] for atch in attach_list: - if atch['lanAttachState'] == 'OUT-OF-SYNC' or atch['lanAttachState'] == 'FAILED': - self.diff_delete.update({net: 'OUT-OF-SYNC'}) + if ( + atch["lanAttachState"] == "OUT-OF-SYNC" + or atch["lanAttachState"] == "FAILED" + ): + self.diff_delete.update({net: "OUT-OF-SYNC"}) break - if atch['lanAttachState'] != 'NA': - self.diff_delete.update({net: 'DEPLOYED'}) + if atch["lanAttachState"] != "NA": + self.diff_delete.update({net: "DEPLOYED"}) state = False time.sleep(self.WAIT_TIME_FOR_DELETE_LOOP) break - self.diff_delete.update({net: 'NA'}) + self.diff_delete.update({net: "NA"}) return True @@ -1671,20 +1863,20 @@ def update_ms_fabric(self, diff): return for list_elem in diff: - for node in 
list_elem['lanAttachList']: - node['fabric'] = self.sn_fab[node['serialNumber']] + for node in list_elem["lanAttachList"]: + node["fabric"] = self.sn_fab[node["serialNumber"]] def push_to_remote(self, is_rollback=False): path = self.paths["GET_NET"].format(self.fabric) - method = 'PUT' + method = "PUT" if self.diff_create_update: for net in self.diff_create_update: - update_path = path + '/{}'.format(net['networkName']) + update_path = path + "/{0}".format(net["networkName"]) resp = dcnm_send(self.module, method, update_path, json.dumps(net)) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "create") + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "create") if fail: if is_rollback: self.failed_to_rollback = True @@ -1697,45 +1889,49 @@ def push_to_remote(self, is_rollback=False): # needed specially for state: overridden # - method = 'POST' + method = "POST" if self.diff_detach: - detach_path = path + '/attachments' + detach_path = path + "/attachments" # Update the fabric name to specific fabric which the switches are part of. 
self.update_ms_fabric(self.diff_detach) - resp = dcnm_send(self.module, method, detach_path, json.dumps(self.diff_detach)) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "attach") + resp = dcnm_send( + self.module, method, detach_path, json.dumps(self.diff_detach) + ) + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "attach") if fail: if is_rollback: self.failed_to_rollback = True return self.failure(resp) - method = 'POST' + method = "POST" if self.diff_undeploy: - deploy_path = path + '/deployments' - resp = dcnm_send(self.module, method, deploy_path, json.dumps(self.diff_undeploy)) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "deploy") + deploy_path = path + "/deployments" + resp = dcnm_send( + self.module, method, deploy_path, json.dumps(self.diff_undeploy) + ) + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "deploy") if fail: if is_rollback: self.failed_to_rollback = True return self.failure(resp) - method = 'DELETE' - del_failure = '' + method = "DELETE" + del_failure = "" if self.diff_delete and self.wait_for_del_ready(): for net, state in self.diff_delete.items(): - if state == 'OUT-OF-SYNC': + if state == "OUT-OF-SYNC": del_failure += net + "," continue delete_path = path + "/" + net resp = dcnm_send(self.module, method, delete_path) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "delete") + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "delete") if fail: if is_rollback: self.failed_to_rollback = True @@ -1743,8 +1939,8 @@ def push_to_remote(self, is_rollback=False): self.failure(resp) if del_failure: - resp = 'Deletion of Networkss {} has failed'.format(del_failure[:-1]) - self.result['response'].append(resp) + resp = "Deletion of Networkss {0} has 
failed".format(del_failure[:-1]) + self.result["response"].append(resp) if is_rollback: self.failed_to_rollback = True return @@ -1752,67 +1948,75 @@ def push_to_remote(self, is_rollback=False): if self.diff_create: for net in self.diff_create: - json_to_dict = json.loads(net['networkTemplateConfig']) - vlanId = json_to_dict.get('vlanId', "") + json_to_dict = json.loads(net["networkTemplateConfig"]) + vlanId = json_to_dict.get("vlanId", "") if not vlanId: vlan_path = self.paths["GET_VLAN"].format(self.fabric) - vlan_data = dcnm_send(self.module, 'GET', vlan_path) + vlan_data = dcnm_send(self.module, "GET", vlan_path) - if vlan_data['RETURN_CODE'] != 200: - self.module.fail_json(msg='Failure getting autogenerated vlan_id {}'.format(vlan_data)) - vlanId = vlan_data['DATA'] + if vlan_data["RETURN_CODE"] != 200: + self.module.fail_json( + msg="Failure getting autogenerated vlan_id {0}".format( + vlan_data + ) + ) + vlanId = vlan_data["DATA"] t_conf = { - 'vlanId': vlanId, - 'gatewayIpAddress': json_to_dict.get('gatewayIpAddress', ""), - 'isLayer2Only': json_to_dict.get('isLayer2Only', False), - 'tag': json_to_dict.get('tag', ""), - 'vlanName': json_to_dict.get('vlanName', ""), - 'intfDescription': json_to_dict.get('intfDescription', ""), - 'mtu': json_to_dict.get('mtu', ""), - 'suppressArp': json_to_dict.get('suppressArp', False), - 'dhcpServerAddr1': json_to_dict.get('dhcpServerAddr1', ""), - 'dhcpServerAddr2': json_to_dict.get('dhcpServerAddr2', ""), - 'dhcpServerAddr3': json_to_dict.get('dhcpServerAddr3', ""), - 'vrfDhcp': json_to_dict.get('vrfDhcp', ""), - 'vrfDhcp2': json_to_dict.get('vrfDhcp2', ""), - 'vrfDhcp3': json_to_dict.get('vrfDhcp3', ""), - 'loopbackId': json_to_dict.get('loopbackId', "") + "vlanId": vlanId, + "gatewayIpAddress": json_to_dict.get("gatewayIpAddress", ""), + "isLayer2Only": json_to_dict.get("isLayer2Only", False), + "tag": json_to_dict.get("tag", ""), + "vlanName": json_to_dict.get("vlanName", ""), + "intfDescription": 
json_to_dict.get("intfDescription", ""), + "mtu": json_to_dict.get("mtu", ""), + "suppressArp": json_to_dict.get("suppressArp", False), + "dhcpServerAddr1": json_to_dict.get("dhcpServerAddr1", ""), + "dhcpServerAddr2": json_to_dict.get("dhcpServerAddr2", ""), + "dhcpServerAddr3": json_to_dict.get("dhcpServerAddr3", ""), + "vrfDhcp": json_to_dict.get("vrfDhcp", ""), + "vrfDhcp2": json_to_dict.get("vrfDhcp2", ""), + "vrfDhcp3": json_to_dict.get("vrfDhcp3", ""), + "loopbackId": json_to_dict.get("loopbackId", ""), } - net.update({'networkTemplateConfig': json.dumps(t_conf)}) + net.update({"networkTemplateConfig": json.dumps(t_conf)}) - method = 'POST' + method = "POST" resp = dcnm_send(self.module, method, path, json.dumps(net)) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "create") + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "create") if fail: if is_rollback: self.failed_to_rollback = True return self.failure(resp) - method = 'POST' + method = "POST" if self.diff_attach: - attach_path = path + '/attachments' + attach_path = path + "/attachments" # Update the fabric name to specific fabric which the switches are part of. 
self.update_ms_fabric(self.diff_attach) for attempt in range(0, 50): - resp = dcnm_send(self.module, method, attach_path, json.dumps(self.diff_attach)) + resp = dcnm_send( + self.module, method, attach_path, json.dumps(self.diff_attach) + ) update_in_progress = False - for key in resp['DATA'].keys(): - if re.search(r'Failed.*Please try after some time', str(resp['DATA'][key])): + for key in resp["DATA"].keys(): + if re.search( + r"Failed.*Please try after some time", str(resp["DATA"][key]) + ): update_in_progress = True if update_in_progress: time.sleep(1) continue - else: - break - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "attach") + + break + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "attach") # If we get here and an update_in_progress is True then # not all of the attachments were successful which represents a # failure condition. @@ -1822,12 +2026,14 @@ def push_to_remote(self, is_rollback=False): return self.failure(resp) - method = 'POST' + method = "POST" if self.diff_deploy: - deploy_path = path + '/deployments' - resp = dcnm_send(self.module, method, deploy_path, json.dumps(self.diff_deploy)) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "deploy") + deploy_path = path + "/deployments" + resp = dcnm_send( + self.module, method, deploy_path, json.dumps(self.diff_deploy) + ) + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "deploy") if fail: if is_rollback: self.failed_to_rollback = True @@ -1837,133 +2043,184 @@ def push_to_remote(self, is_rollback=False): def validate_input(self): """Parse the playbook values, validate to param specs.""" - state = self.params['state'] + state = self.params["state"] - if state == 'query': + if state == "query": net_spec = dict( - net_name=dict(required=True, type='str', length_max=64), - net_id=dict(type='int', 
range_max=16777214), - vrf_name=dict(type='str', length_max=32), - attach=dict(type='list'), - deploy=dict(type='bool'), - gw_ip_subnet=dict(type='ipv4_subnet', default=""), - vlan_id=dict(type='int', range_max=4094), - routing_tag=dict(type='int', default=12345, range_max=4294967295), - net_template=dict(type='str', default='Default_Network_Universal'), - net_extension_template=dict(type='str', default='Default_Network_Extension_Universal'), - is_l2only=dict(type='bool', default=False), - vlan_name=dict(type='str', length_max=128), - int_desc=dict(type='str', length_max=258), - mtu_l3intf=dict(type='int', range_min=68, range_max=9216), - arp_suppress=dict(type='bool', default=False), - dhcp_srvr1_ip=dict(type='ipv4', default=""), - dhcp_srvr2_ip=dict(type='ipv4', default=""), - dhcp_srvr3_ip=dict(type='ipv4', default=""), - dhcp_srvr1_vrf=dict(type='str', length_max=32), - dhcp_srvr2_vrf=dict(type='str', length_max=32), - dhcp_srvr3_vrf=dict(type='str', length_max=32), - dhcp_loopback_id=dict(type='int', range_min=0, range_max=1023) + net_name=dict(required=True, type="str", length_max=64), + net_id=dict(type="int", range_max=16777214), + vrf_name=dict(type="str", length_max=32), + attach=dict(type="list"), + deploy=dict(type="bool"), + gw_ip_subnet=dict(type="ipv4_subnet", default=""), + vlan_id=dict(type="int", range_max=4094), + routing_tag=dict(type="int", default=12345, range_max=4294967295), + net_template=dict(type="str", default="Default_Network_Universal"), + net_extension_template=dict( + type="str", default="Default_Network_Extension_Universal" + ), + is_l2only=dict(type="bool", default=False), + vlan_name=dict(type="str", length_max=128), + int_desc=dict(type="str", length_max=258), + mtu_l3intf=dict(type="int", range_min=68, range_max=9216), + arp_suppress=dict(type="bool", default=False), + dhcp_srvr1_ip=dict(type="ipv4", default=""), + dhcp_srvr2_ip=dict(type="ipv4", default=""), + dhcp_srvr3_ip=dict(type="ipv4", default=""), + 
dhcp_srvr1_vrf=dict(type="str", length_max=32), + dhcp_srvr2_vrf=dict(type="str", length_max=32), + dhcp_srvr3_vrf=dict(type="str", length_max=32), + dhcp_loopback_id=dict(type="int", range_min=0, range_max=1023), ) att_spec = dict( - ip_address=dict(required=True, type='str'), - ports=dict(required=True, type='list'), - deploy=dict(type='bool', default=True) + ip_address=dict(required=True, type="str"), + ports=dict(required=True, type="list"), + deploy=dict(type="bool", default=True), ) if self.config: msg = None # Validate net params - valid_net, invalid_params = validate_list_of_dicts(self.config, net_spec) + valid_net, invalid_params = validate_list_of_dicts( + self.config, net_spec + ) for net in valid_net: - if net.get('attach'): - valid_att, invalid_att = validate_list_of_dicts(net['attach'], att_spec) - net['attach'] = valid_att + if net.get("attach"): + valid_att, invalid_att = validate_list_of_dicts( + net["attach"], att_spec + ) + net["attach"] = valid_att invalid_params.extend(invalid_att) - if net.get('is_l2only', False) is True: - if net.get('vrf_name', "") is None or net.get('vrf_name', "") == "": - net['vrf_name'] = 'NA' + if net.get("is_l2only", False) is True: + if ( + net.get("vrf_name", "") is None + or net.get("vrf_name", "") == "" + ): + net["vrf_name"] = "NA" self.validated.append(net) if invalid_params: - msg = 'Invalid parameters in playbook: {}'.format('\n'.join(invalid_params)) + msg = "Invalid parameters in playbook: {0}".format( + "\n".join(invalid_params) + ) self.module.fail_json(msg=msg) else: net_spec = dict( - net_name=dict(required=True, type='str', length_max=64), - net_id=dict(type='int', range_max=16777214), - vrf_name=dict(type='str', length_max=32), - attach=dict(type='list'), - deploy=dict(type='bool'), - gw_ip_subnet=dict(type='ipv4_subnet', default=""), - vlan_id=dict(type='int', range_max=4094), - routing_tag=dict(type='int', default=12345, range_max=4294967295), - net_template=dict(type='str', 
default='Default_Network_Universal'), - net_extension_template=dict(type='str', default='Default_Network_Extension_Universal'), - is_l2only=dict(type='bool', default=False), - vlan_name=dict(type='str', length_max=128), - int_desc=dict(type='str', length_max=258), - mtu_l3intf=dict(type='int', range_min=68, range_max=9216), - arp_suppress=dict(type='bool', default=False), - dhcp_srvr1_ip=dict(type='ipv4', default=""), - dhcp_srvr2_ip=dict(type='ipv4', default=""), - dhcp_srvr3_ip=dict(type='ipv4', default=""), - dhcp_srvr1_vrf=dict(type='str', length_max=32), - dhcp_srvr2_vrf=dict(type='str', length_max=32), - dhcp_srvr3_vrf=dict(type='str', length_max=32), - dhcp_loopback_id=dict(type='int', range_min=0, range_max=1023) + net_name=dict(required=True, type="str", length_max=64), + net_id=dict(type="int", range_max=16777214), + vrf_name=dict(type="str", length_max=32), + attach=dict(type="list"), + deploy=dict(type="bool"), + gw_ip_subnet=dict(type="ipv4_subnet", default=""), + vlan_id=dict(type="int", range_max=4094), + routing_tag=dict(type="int", default=12345, range_max=4294967295), + net_template=dict(type="str", default="Default_Network_Universal"), + net_extension_template=dict( + type="str", default="Default_Network_Extension_Universal" + ), + is_l2only=dict(type="bool", default=False), + vlan_name=dict(type="str", length_max=128), + int_desc=dict(type="str", length_max=258), + mtu_l3intf=dict(type="int", range_min=68, range_max=9216), + arp_suppress=dict(type="bool", default=False), + dhcp_srvr1_ip=dict(type="ipv4", default=""), + dhcp_srvr2_ip=dict(type="ipv4", default=""), + dhcp_srvr3_ip=dict(type="ipv4", default=""), + dhcp_srvr1_vrf=dict(type="str", length_max=32), + dhcp_srvr2_vrf=dict(type="str", length_max=32), + dhcp_srvr3_vrf=dict(type="str", length_max=32), + dhcp_loopback_id=dict(type="int", range_min=0, range_max=1023), ) att_spec = dict( - ip_address=dict(required=True, type='str'), - ports=dict(required=True, type='list'), - 
deploy=dict(type='bool', default=True) + ip_address=dict(required=True, type="str"), + ports=dict(required=True, type="list"), + deploy=dict(type="bool", default=True), ) if self.config: msg = None # Validate net params - valid_net, invalid_params = validate_list_of_dicts(self.config, net_spec) + valid_net, invalid_params = validate_list_of_dicts( + self.config, net_spec + ) for net in valid_net: - if net.get('attach'): - valid_att, invalid_att = validate_list_of_dicts(net['attach'], att_spec) - net['attach'] = valid_att + if net.get("attach"): + valid_att, invalid_att = validate_list_of_dicts( + net["attach"], att_spec + ) + net["attach"] = valid_att invalid_params.extend(invalid_att) - if state != 'deleted': - if net.get('is_l2only', False) is True: - if net.get('vrf_name', "") is not None and net.get('vrf_name', "") != "": - invalid_params.append("vrf_name should not be specified for L2 Networks") + if state != "deleted": + if net.get("is_l2only", False) is True: + if ( + net.get("vrf_name", "") is not None + and net.get("vrf_name", "") != "" + ): + invalid_params.append( + "vrf_name should not be specified for L2 Networks" + ) else: - net['vrf_name'] = 'NA' + net["vrf_name"] = "NA" else: - if net.get('vrf_name', "") is None: - invalid_params.append("vrf_name is required for L3 Networks") - - if (net.get('dhcp_srvr1_ip') and not net.get('dhcp_srvr1_vrf')) or \ - (net.get('dhcp_srvr1_vrf') and not net.get('dhcp_srvr1_ip')) or \ - (net.get('dhcp_srvr2_ip') and not net.get('dhcp_srvr2_vrf')) or \ - (net.get('dhcp_srvr2_vrf') and not net.get('dhcp_srvr2_ip')) or \ - (net.get('dhcp_srvr3_ip') and not net.get('dhcp_srvr3_vrf')) or \ - (net.get('dhcp_srvr3_vrf') and not net.get('dhcp_srvr3_ip')): - invalid_params.append("DHCP server IP should be specified along with DHCP server VRF") + if net.get("vrf_name", "") is None: + invalid_params.append( + "vrf_name is required for L3 Networks" + ) + + if ( + (net.get("dhcp_srvr1_ip") and not net.get("dhcp_srvr1_vrf")) + or ( + 
net.get("dhcp_srvr1_vrf") + and not net.get("dhcp_srvr1_ip") + ) + or ( + net.get("dhcp_srvr2_ip") + and not net.get("dhcp_srvr2_vrf") + ) + or ( + net.get("dhcp_srvr2_vrf") + and not net.get("dhcp_srvr2_ip") + ) + or ( + net.get("dhcp_srvr3_ip") + and not net.get("dhcp_srvr3_vrf") + ) + or ( + net.get("dhcp_srvr3_vrf") + and not net.get("dhcp_srvr3_ip") + ) + ): + invalid_params.append( + "DHCP server IP should be specified along with DHCP server VRF" + ) self.validated.append(net) if invalid_params: - msg = 'Invalid parameters in playbook: {}'.format('\n'.join(invalid_params)) + msg = "Invalid parameters in playbook: {0}".format( + "\n".join(invalid_params) + ) self.module.fail_json(msg=msg) else: - state = self.params['state'] + state = self.params["state"] msg = None - if state == 'merged' or state == 'overridden' or \ - state == 'replaced' or state == 'query': - msg = "config: element is mandatory for this state {}".format(state) + if ( + state == "merged" + or state == "overridden" + or state == "replaced" + or state == "query" + ): + msg = "config: element is mandatory for this state {0}".format( + state + ) if msg: self.module.fail_json(msg=msg) @@ -1975,31 +2232,31 @@ def handle_response(self, resp, op): res = resp.copy() - if op == 'query_dcnm': + if op == "query_dcnm": # This if blocks handles responses to the query APIs against DCNM. # Basically all GET operations. # - if res.get('ERROR') == 'Not Found' and res['RETURN_CODE'] == 404: + if res.get("ERROR") == "Not Found" and res["RETURN_CODE"] == 404: return True, False - if res['RETURN_CODE'] != 200 or res['MESSAGE'] != 'OK': + if res["RETURN_CODE"] != 200 or res["MESSAGE"] != "OK": return False, True return False, False # Responses to all other operations POST and PUT are handled here. 
- if res.get('MESSAGE') != 'OK': + if res.get("MESSAGE") != "OK": fail = True changed = False return fail, changed - if res.get('ERROR'): + if res.get("ERROR"): fail = True changed = False - if op == 'attach' and 'is in use already' in str(res.values()): + if op == "attach" and "is in use already" in str(res.values()): fail = True changed = False - if op == 'attach' and 'Invalid interfaces' in str(res.values()): + if op == "attach" and "Invalid interfaces" in str(res.values()): fail = True changed = True - if op == 'deploy' and 'No switches PENDING for deployment' in str(res.values()): + if op == "deploy" and "No switches PENDING for deployment" in str(res.values()): changed = False return fail, changed @@ -2024,16 +2281,18 @@ def failure(self, resp): if self.failed_to_rollback: msg1 = "FAILED - Attempted rollback of the task has failed, may need manual intervention" else: - msg1 = 'SUCCESS - Attempted rollback of the task has succeeded' + msg1 = "SUCCESS - Attempted rollback of the task has succeeded" res = copy.deepcopy(resp) - res.update({'ROLLBACK_RESULT': msg1}) + res.update({"ROLLBACK_RESULT": msg1}) - if not resp.get('DATA'): - data = copy.deepcopy(resp.get('DATA')) - if data.get('stackTrace'): - data.update({'stackTrace': 'Stack trace is hidden, use \'-vvvvv\' to print it'}) - res.update({'DATA': data}) + if not resp.get("DATA"): + data = copy.deepcopy(resp.get("DATA")) + if data.get("stackTrace"): + data.update( + {"stackTrace": "Stack trace is hidden, use '-vvvvv' to print it"} + ) + res.update({"DATA": data}) if self.module._verbosity >= 5: self.module.fail_json(msg=res) @@ -2054,8 +2313,8 @@ def dcnm_update_network_information(self, want, have, cfg): if cfg.get("net_extension_template", None) is None: want["networkExtensionTemplate"] = have["networkExtensionTemplate"] - json_to_dict_want = json.loads(want['networkTemplateConfig']) - json_to_dict_have = json.loads(have['networkTemplateConfig']) + json_to_dict_want = 
json.loads(want["networkTemplateConfig"]) + json_to_dict_have = json.loads(have["networkTemplateConfig"]) if cfg.get("vlan_id", None) is None: json_to_dict_want["vlanId"] = json_to_dict_have["vlanId"] @@ -2068,7 +2327,9 @@ def dcnm_update_network_information(self, want, have, cfg): json_to_dict_want["tag"] = int(json_to_dict_want["tag"]) if cfg.get("gw_ip_subnet", None) is None: - json_to_dict_want["gatewayIpAddress"] = json_to_dict_have["gatewayIpAddress"] + json_to_dict_want["gatewayIpAddress"] = json_to_dict_have[ + "gatewayIpAddress" + ] if cfg.get("is_l2only", None) is None: json_to_dict_want["isLayer2Only"] = json_to_dict_have["isLayer2Only"] @@ -2116,7 +2377,7 @@ def dcnm_update_network_information(self, want, have, cfg): if cfg.get("dhcp_loopback_id", None) is None: json_to_dict_want["loopbackId"] = json_to_dict_have["loopbackId"] - want.update({'networkTemplateConfig': json.dumps(json_to_dict_want)}) + want.update({"networkTemplateConfig": json.dumps(json_to_dict_want)}) def update_want(self): """ @@ -2147,47 +2408,43 @@ def update_want(self): match_have = [ have for have in self.have_create - if ( - (net["networkName"] == have["networkName"]) - ) + if ((net["networkName"] == have["networkName"])) ] if match_have == []: continue # Get the network from self.config to check if a particular object is included or not match_cfg = [ - cfg - for cfg in self.config - if ( - (net["networkName"] == cfg["net_name"]) - ) + cfg for cfg in self.config if ((net["networkName"] == cfg["net_name"])) ] if match_cfg == []: continue - self.dcnm_update_network_information( - net, match_have[0], match_cfg[0] - ) + self.dcnm_update_network_information(net, match_have[0], match_cfg[0]) def main(): - """ main entry point for module execution - """ + """main entry point for module execution""" element_spec = dict( - fabric=dict(required=True, type='str'), - config=dict(required=False, type='list', elements='dict'), - state=dict(default='merged', - choices=['merged', 'replaced', 
'deleted', 'overridden', 'query']), + fabric=dict(required=True, type="str"), + config=dict(required=False, type="list", elements="dict"), + state=dict( + default="merged", + choices=["merged", "replaced", "deleted", "overridden", "query"], + ), ) - module = AnsibleModule(argument_spec=element_spec, - supports_check_mode=True) + module = AnsibleModule(argument_spec=element_spec, supports_check_mode=True) dcnm_net = DcnmNetwork(module) if not dcnm_net.ip_sn: - module.fail_json(msg="Fabric {} missing on DCNM or does not have any switches".format(dcnm_net.fabric)) + module.fail_json( + msg="Fabric {0} missing on DCNM or does not have any switches".format( + dcnm_net.fabric + ) + ) dcnm_net.validate_input() @@ -2202,36 +2459,43 @@ def main(): # they must be purged or defaulted. dcnm_net.update_want() - if module.params['state'] == 'merged': + if module.params["state"] == "merged": warn_msg = dcnm_net.get_diff_merge() - if module.params['state'] == 'replaced': + if module.params["state"] == "replaced": warn_msg = dcnm_net.get_diff_replace() - if module.params['state'] == 'overridden': + if module.params["state"] == "overridden": warn_msg = dcnm_net.get_diff_override() - if module.params['state'] == 'deleted': + if module.params["state"] == "deleted": dcnm_net.get_diff_delete() - if module.params['state'] == 'query': + if module.params["state"] == "query": dcnm_net.get_diff_query() - dcnm_net.result['response'] = dcnm_net.query - - dcnm_net.result['warnings'].append(warn_msg) if warn_msg else [] - - if dcnm_net.diff_create or dcnm_net.diff_create_quick or dcnm_net.diff_attach \ - or dcnm_net.diff_deploy or dcnm_net.diff_delete or dcnm_net.diff_create_update \ - or dcnm_net.diff_detach or dcnm_net.diff_undeploy: - dcnm_net.result['changed'] = True + dcnm_net.result["response"] = dcnm_net.query + + dcnm_net.result["warnings"].append(warn_msg) if warn_msg else [] + + if ( + dcnm_net.diff_create + or dcnm_net.diff_create_quick + or dcnm_net.diff_attach + or 
dcnm_net.diff_deploy + or dcnm_net.diff_delete + or dcnm_net.diff_create_update + or dcnm_net.diff_detach + or dcnm_net.diff_undeploy + ): + dcnm_net.result["changed"] = True else: module.exit_json(**dcnm_net.result) dcnm_net.format_diff() - dcnm_net.result['diff'] = dcnm_net.diff_input_format + dcnm_net.result["diff"] = dcnm_net.diff_input_format if module.check_mode: - dcnm_net.result['changed'] = False + dcnm_net.result["changed"] = False module.exit_json(**dcnm_net.result) dcnm_net.push_to_remote() @@ -2239,5 +2503,5 @@ def main(): module.exit_json(**dcnm_net.result) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/plugins/modules/dcnm_policy.py b/plugins/modules/dcnm_policy.py index 800f1cc98..08e93b990 100644 --- a/plugins/modules/dcnm_policy.py +++ b/plugins/modules/dcnm_policy.py @@ -1,6 +1,6 @@ #!/usr/bin/python # -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,7 +13,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import absolute_import, division, print_function +__metaclass__ = type __author__ = "Mallik Mudigonda" DOCUMENTATION = """ @@ -348,32 +350,33 @@ dcnm_get_ip_addr_info, validate_list_of_dicts, get_ip_sn_dict, - dcnm_version_supported + dcnm_version_supported, ) + class DcnmPolicy: dcnm_policy_paths = { 11: { - "POLICY_WITH_ID": "/rest/control/policies/{}", - "POLICY_GET_SWITCHES": "/rest/control/policies/switches?serialNumber={}", - "POLICY_BULK_CREATE": "/rest/control/policies/bulk-create", - "POLICY_MARK_DELETE": "/rest/control/policies/{}/mark-delete", - "POLICY_DEPLOY": "/rest/control/policies/deploy", - "POLICY_CFG_DEPLOY": "/rest/control/fabrics/{}/config-deploy/", - "POLICY_WITH_POLICY_ID": "/rest/control/policies/{}", - "CONFIG_PREVIEW": "/rest/control/fabrics/{}/config-preview?forceShowRun=false&showBrief=true" - }, + "POLICY_WITH_ID": "/rest/control/policies/{}", + "POLICY_GET_SWITCHES": "/rest/control/policies/switches?serialNumber={}", + "POLICY_BULK_CREATE": "/rest/control/policies/bulk-create", + "POLICY_MARK_DELETE": "/rest/control/policies/{}/mark-delete", + "POLICY_DEPLOY": "/rest/control/policies/deploy", + "POLICY_CFG_DEPLOY": "/rest/control/fabrics/{}/config-deploy/", + "POLICY_WITH_POLICY_ID": "/rest/control/policies/{}", + "CONFIG_PREVIEW": "/rest/control/fabrics/{}/config-preview?forceShowRun=false&showBrief=true", + }, 12: { - "POLICY_WITH_ID": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/{}", - "POLICY_GET_SWITCHES": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/switches?serialNumber={}", - "POLICY_BULK_CREATE": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/bulk-create", - "POLICY_MARK_DELETE": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/{}/mark-delete", - "POLICY_DEPLOY": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/deploy", - "POLICY_CFG_DEPLOY": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/fabrics/{}/config-deploy/", - "POLICY_WITH_POLICY_ID": 
"/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/{}", - "CONFIG_PREVIEW": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/fabrics/{}/config-preview?forceShowRun=false&showBrief=true" - } + "POLICY_WITH_ID": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/{}", + "POLICY_GET_SWITCHES": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/switches?serialNumber={}", + "POLICY_BULK_CREATE": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/bulk-create", + "POLICY_MARK_DELETE": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/{}/mark-delete", + "POLICY_DEPLOY": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/deploy", + "POLICY_CFG_DEPLOY": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/fabrics/{}/config-deploy/", + "POLICY_WITH_POLICY_ID": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/{}", + "CONFIG_PREVIEW": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/fabrics/{}/config-preview?forceShowRun=false&showBrief=true", + }, } def __init__(self, module): @@ -406,9 +409,7 @@ def __init__(self, module): self.dcnm_version = dcnm_version_supported(self.module) - self.inventory_data = get_fabric_inventory_details( - self.module, self.fabric - ) + self.inventory_data = get_fabric_inventory_details(self.module, self.fabric) self.ip_sn, self.hn_sn = get_ip_sn_dict(self.inventory_data) self.result = dict(changed=False, diff=[], response=[]) @@ -437,9 +438,7 @@ def dcnm_policy_validate_input(self): policy_spec = dict( name=dict(required=True, type="str"), - create_additional_policy=dict( - required=False, type="bool", default=True - ), + create_additional_policy=dict(required=False, type="bool", default=True), description=dict(required=False, type="str", default=""), priority=dict(required=False, type=int, default=500), policy_vars=dict(required=False, type=dict, default={}), @@ -450,11 +449,9 @@ def dcnm_policy_validate_input(self): clist = [] clist.append(cfg) - policy_info, invalid_params = 
validate_list_of_dicts( - clist, policy_spec - ) + policy_info, invalid_params = validate_list_of_dicts(clist, policy_spec) if invalid_params: - mesg = 'Invalid parameters in playbook: while processing policy "{}", Error: {}'.format( + mesg = 'Invalid parameters in playbook: while processing policy "{0}", Error: {1}'.format( cfg["name"], invalid_params ) self.module.fail_json(msg=mesg) @@ -476,9 +473,7 @@ def dcnm_get_policy_payload_with_template_name(self, pelem, sw): policy_payload["templateName"] = pelem["name"] policy_payload["description"] = pelem["description"] policy_payload["priority"] = pelem["priority"] - policy_payload["create_additional_policy"] = pelem[ - "create_additional_policy" - ] + policy_payload["create_additional_policy"] = pelem["create_additional_policy"] policy_payload["policy_id_given"] = False @@ -515,9 +510,7 @@ def dcnm_get_policy_payload_with_policy_id(self, pelem, sw): policy_payload["policyId"] = policy["policyId"] policy_payload["templateName"] = policy["templateName"] policy_payload["priority"] = pelem["priority"] - policy_payload["create_additional_policy"] = pelem[ - "create_additional_policy" - ] + policy_payload["create_additional_policy"] = pelem["create_additional_policy"] policy_payload["nvPairs"] = policy["nvPairs"] policy_payload["policy_id_given"] = True @@ -746,9 +739,7 @@ def dcnm_policy_get_diff_merge(self): deploy["serialNo"] = policy["serialNumber"] self.changed_dict[0]["deploy"].append(deploy) - if (policy_id is not None) and ( - policy_id not in self.deploy_payload - ): + if (policy_id is not None) and (policy_id not in self.deploy_payload): self.deploy_payload.append(policy_id) def dcnm_policy_get_delete_payload(self, policy): @@ -792,8 +783,10 @@ def dcnm_policy_get_diff_deleted(self): for pl in plist for wp in self.want if ( - (wp["policy_id_given"] is False) and (pl["templateName"] == wp["templateName"]) or ( - wp["policy_id_given"] is True) and (pl["policyId"] == wp["policyId"]) + (wp["policy_id_given"] is 
False) + and (pl["templateName"] == wp["templateName"]) + or (wp["policy_id_given"] is True) + and (pl["policyId"] == wp["policyId"]) ) ] @@ -840,13 +833,8 @@ def dcnm_policy_get_diff_query(self): # Policy ID is given, Fetch the specific information. pinfo = self.dcnm_policy_get_policy_info_from_dcnm(cfg["name"]) if pinfo != []: - if ( - pinfo["templateName"] - not in self.changed_dict[0]["query"] - ): - self.changed_dict[0]["query"].append( - pinfo["templateName"] - ) + if pinfo["templateName"] not in self.changed_dict[0]["query"]: + self.changed_dict[0]["query"].append(pinfo["templateName"]) self.result["response"].append(pinfo) else: # templateName is given. Note this down @@ -879,10 +867,7 @@ def dcnm_policy_get_diff_query(self): [ t["templateName"] for t in match_pol - if ( - t["templateName"] - not in self.changed_dict[0]["query"] - ) + if (t["templateName"] not in self.changed_dict[0]["query"]) ] ) ) @@ -910,10 +895,12 @@ def dcnm_policy_create_policy(self, policy, command): if "is not unique" in fl["message"]: retries = retries + 1 continue - else: - break - else: + break + + # Don't think we need two break statements here. Test and remove. + break + self.result["response"].append(resp) return resp @@ -996,19 +983,20 @@ def dcnm_policy_send_message_to_dcnm(self): retries += 1 resp = self.dcnm_policy_save_and_deploy(snos) - if ( - resp - and (resp["RETURN_CODE"] != 200) - ): + if resp and (resp["RETURN_CODE"] != 200): self.module.fail_json(msg=resp) - # Get the SYNC status of the switch. Afte config and deploy at fabric level, the status - # MUST be "In-Sync". If not keep retrying + # Get the SYNC status of the switch. Afte config and deploy at fabric level, the status + # MUST be "In-Sync". 
If not keep retrying path = self.paths["CONFIG_PREVIEW"].format(self.fabric) cp_resp = dcnm_send(self.module, "GET", path, "") - if cp_resp.get ("RETURN_CODE", 0) == 200: - match_data = [item for item in cp_resp.get ("DATA", []) if item["switchId"] in snos] + if cp_resp.get("RETURN_CODE", 0) == 200: + match_data = [ + item + for item in cp_resp.get("DATA", []) + if item["switchId"] in snos + ] else: self.module.fail_json(msg=cp_resp) @@ -1053,10 +1041,7 @@ def dcnm_policy_send_message_to_dcnm(self): # switch if (snos != []) and (delete_flag is True): self.dcnm_policy_save_and_deploy(snos) - if ( - resp - and (resp["RETURN_CODE"] != 200) - ): + if resp and (resp["RETURN_CODE"] != 200): self.module.fail_json(msg=resp) for policy in self.diff_create: @@ -1073,9 +1058,9 @@ def dcnm_policy_send_message_to_dcnm(self): and (resp.get("DATA", None) is not None) ): if resp["DATA"].get("successList", None) is not None: - if "is created successfully" in resp["DATA"][ - "successList" - ][0].get("message"): + if "is created successfully" in resp["DATA"]["successList"][0].get( + "message" + ): policy_id = re.findall( r"POLICY-\d+", resp["DATA"]["successList"][0].get("message"), @@ -1101,9 +1086,9 @@ def dcnm_policy_send_message_to_dcnm(self): and (resp.get("DATA", None) is not None) ): if resp["DATA"].get("successList", None) is not None: - if "is created successfully" in resp["DATA"][ - "successList" - ][0].get("message"): + if "is created successfully" in resp["DATA"]["successList"][0].get( + "message" + ): create_flag = True else: self.module.fail_json(msg=resp) @@ -1123,10 +1108,7 @@ def dcnm_policy_send_message_to_dcnm(self): self.module.fail_json(msg=resp) self.result["changed"] = ( - mark_delete_flag - or delete_flag - or create_flag - or deploy_flag + mark_delete_flag or delete_flag or create_flag or deploy_flag ) def dcnm_translate_switch_info(self, config, ip_sn, hn_sn): @@ -1153,9 +1135,7 @@ def dcnm_translate_config(self, config): # We will remove it from there and 
add it to individual policies # Get the position of the matching dict - pos = next( - (index for (index, d) in enumerate(config) if "switch" in d), None - ) + pos = next((index for (index, d) in enumerate(config) if "switch" in d), None) if pos is None: return config @@ -1204,11 +1184,10 @@ def dcnm_translate_config(self, config): def main(): - """ main entry point for module execution - """ + """main entry point for module execution""" element_spec = dict( fabric=dict(required=True, type="str"), - config=dict(required=False, type="list", elements='dict'), + config=dict(required=False, type="list", elements="dict"), state=dict( type="str", default="merged", @@ -1217,9 +1196,7 @@ def main(): deploy=dict(required=False, type="bool", default=True), ) - module = AnsibleModule( - argument_spec=element_spec, supports_check_mode=True - ) + module = AnsibleModule(argument_spec=element_spec, supports_check_mode=True) dcnm_policy = DcnmPolicy(module) @@ -1230,11 +1207,11 @@ def main(): if not dcnm_policy.ip_sn: dcnm_policy.result[ "msg" - ] = "Fabric {} missing on DCNM or does not have any switches".format( + ] = "Fabric {0} missing on DCNM or does not have any switches".format( dcnm_policy.fabric ) module.fail_json( - msg="Fabric {} missing on DCNM or does not have any switches".format( + msg="Fabric {0} missing on DCNM or does not have any switches".format( dcnm_policy.fabric ) ) @@ -1244,7 +1221,7 @@ def main(): if not dcnm_policy.config: if state == "merged" or state == "deleted" or state == "query": module.fail_json( - msg="'config' element is mandatory for state '{}', given = '{}'".format( + msg="'config' element is mandatory for state '{0}', given = '{1}'".format( state, dcnm_policy.config ) ) @@ -1257,9 +1234,7 @@ def main(): if module.params["state"] != "query": # Translate the given playbook config to some convenient format. Each policy should # have the switches to be deployed. 
- dcnm_policy.config = dcnm_policy.dcnm_translate_config( - dcnm_policy.config - ) + dcnm_policy.config = dcnm_policy.dcnm_translate_config(dcnm_policy.config) # See if this is required dcnm_policy.dcnm_policy_copy_config() diff --git a/plugins/modules/dcnm_rest.py b/plugins/modules/dcnm_rest.py index 1e4478cb1..9ef0a0a51 100644 --- a/plugins/modules/dcnm_rest.py +++ b/plugins/modules/dcnm_rest.py @@ -1,6 +1,6 @@ #!/usr/bin/python # -# Copyright (c) 2020-2021 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import absolute_import, division, print_function +__metaclass__ = type __author__ = "Mike Wiebe" -DOCUMENTATION = ''' +DOCUMENTATION = """ --- module: dcnm_rest short_description: Send REST API requests to DCNM controller. @@ -44,9 +46,9 @@ type: raw author: - Mike Wiebe (@mikewiebe) -''' +""" -EXAMPLES = ''' +EXAMPLES = """ # This module can be used to send any REST API requests that are supported by # the DCNM controller. 
# @@ -59,7 +61,7 @@ path: /rest/control/fabrics - name: Set deployment to false in lanAttachList for vrf - dcnm_rest: + dcnm_rest: method: POST path: /rest/top-down/fabrics/fabric1/vrfs/attachments json_data: '[{"vrfName":"sales66_vrf1","lanAttachList":[{"fabric":"fabric1","vrfName":"sales66_vrf1","serialNumber":"FDO21392QKM","vlan":2000,"freeformConfig":"","deployment":false,"extensionValues":"","instanceValues":"{\"loopbackId\":\"\",\"loopbackIpAddress\":\"\",\"loopbackIpV6Address\":\"\"}"}]}]' @@ -69,22 +71,22 @@ data: "{{ lookup('file', 'validate_payload') }}" - name: Validate a template - cisco.dcnm.dcnm_rest: + cisco.dcnm.dcnm_rest: method: POST path: /fm/fmrest/config/templates/validate json_data: "{{ data }}" register: result -''' +""" # noqa -RETURN = ''' +RETURN = """ response: description: - Success or Error Data retrieved from DCNM returned: always type: list elements: dict -''' +""" import json from json.decoder import JSONDecodeError @@ -98,24 +100,19 @@ def main(): # define available arguments/parameters a user can pass to the module argument_spec = dict( - method=dict(required=True, choices=['GET', 'POST', 'PUT', 'DELETE']), - path=dict(required=True, type='str'), - data=dict(type='raw', required=False, default=None, aliases=["json_data"])) + method=dict(required=True, choices=["GET", "POST", "PUT", "DELETE"]), + path=dict(required=True, type="str"), + data=dict(type="raw", required=False, default=None, aliases=["json_data"]), + ) # seed the result dict - result = dict( - changed=False, - response=dict() - ) + result = dict(changed=False, response=dict()) - module = AnsibleModule( - argument_spec=argument_spec, - supports_check_mode=True - ) + module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True) - method = module.params['method'] - path = module.params['path'] - for key in ['json_data', 'data']: + method = module.params["method"] + path = module.params["path"] + for key in ["json_data", "data"]: data = module.params.get(key) 
if data is not None: break @@ -125,16 +122,16 @@ def main(): # Determine if this is valid JSON or not try: json.loads(data) - result['response'] = dcnm_send(module, method, path, data) + result["response"] = dcnm_send(module, method, path, data) except json.JSONDecodeError: # Resend data as text since it's not valid JSON - result['response'] = dcnm_send(module, method, path, data, "text") + result["response"] = dcnm_send(module, method, path, data, "text") - if result['response']['RETURN_CODE'] >= 400: - module.fail_json(msg=result['response']) + if result["response"]["RETURN_CODE"] >= 400: + module.fail_json(msg=result["response"]) module.exit_json(**result) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/plugins/modules/dcnm_service_node.py b/plugins/modules/dcnm_service_node.py index ab2c3a3b1..39c175f59 100644 --- a/plugins/modules/dcnm_service_node.py +++ b/plugins/modules/dcnm_service_node.py @@ -1,6 +1,6 @@ #!/usr/bin/python # -# Copyright (c) 2021 Cisco and/or its affiliates. +# Copyright (c) 2021-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import absolute_import, division, print_function +__metaclass__ = type __author__ = "Karthik Babu Harichandra Babu" -DOCUMENTATION = ''' +DOCUMENTATION = """ --- module: dcnm_service_node short_description: Create/Modify/Delete service node based on type and attached interfaces from a DCNM managed VXLAN fabric. 
@@ -92,9 +94,9 @@ - List of switch interfaces where the service node will be attached type: str required: true -''' +""" -EXAMPLES = ''' +EXAMPLES = """ # L4-L7 Service Insertion: # # Cisco DCNM has the ability to insert Layer 4-Layer 7 (L4-L7) service devices in a data center fabric, and also enables selectively @@ -308,36 +310,42 @@ fabric: Fabric1 service_fabric: external state: query -''' +""" import json import copy -from ansible_collections.cisco.dcnm.plugins.module_utils.network.dcnm.dcnm import get_fabric_inventory_details, \ - dcnm_send, validate_list_of_dicts, dcnm_get_ip_addr_info, get_ip_sn_dict, dcnm_version_supported +from ansible_collections.cisco.dcnm.plugins.module_utils.network.dcnm.dcnm import ( + get_fabric_inventory_details, + dcnm_send, + validate_list_of_dicts, + dcnm_get_ip_addr_info, + get_ip_sn_dict, + dcnm_version_supported, +) from ansible.module_utils.basic import AnsibleModule class DcnmServiceNode: - dcnm_sn_paths={ + dcnm_sn_paths = { 11: { - "GET_SN_ATTACHED": "/appcenter/Cisco/elasticservice/elasticservice-api/?attached-fabric={}", - "GET_SN": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}", - "POST_SN": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes" - }, + "GET_SN_ATTACHED": "/appcenter/Cisco/elasticservice/elasticservice-api/?attached-fabric={}", + "GET_SN": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}", + "POST_SN": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes", + }, 12: { - "GET_SN_ATTACHED": "/appcenter/cisco/ndfc/api/v1/elastic-service/service-nodes/?attached-fabric={}", - "GET_SN": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}", - "POST_SN": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes" - } + "GET_SN_ATTACHED": "/appcenter/cisco/ndfc/api/v1/elastic-service/service-nodes/?attached-fabric={}", + "GET_SN": 
"/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}", + "POST_SN": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes", + }, } def __init__(self, module): self.module = module self.params = module.params - self.fabric = module.params['fabric'] - self.service_fabric = module.params['service_fabric'] - self.config = copy.deepcopy(module.params.get('config')) + self.fabric = module.params["fabric"] + self.service_fabric = module.params["service_fabric"] + self.config = copy.deepcopy(module.params.get("config")) self.check_mode = False self.have_create = [] self.want_create = [] @@ -354,12 +362,7 @@ def __init__(self, module): else: self.paths = self.dcnm_sn_paths[self.dcnm_version] - self.result = dict( - changed=False, - diff=[], - response=[], - warnings=[] - ) + self.result = dict(changed=False, diff=[], response=[], warnings=[]) self.failed_to_rollback = False self.WAIT_TIME_FOR_DELETE_LOOP = 5 # in seconds @@ -369,55 +372,63 @@ def update_create_params(self, snode): if not snode: return snode - state = self.params['state'] + state = self.params["state"] - if state == 'query': - snode_upd = { - "name": snode['name'] - } + if state == "query": + snode_upd = {"name": snode["name"]} else: serial = [] - for sw in snode['switches']: + for sw in snode["switches"]: sw = dcnm_get_ip_addr_info(self.module, sw, None, None) for ip, ser in self.ip_sn.items(): if ip == sw: serial.append(ser) if not serial: - self.module.fail_json(msg='Fabric: {} does not have the switch: {}' - .format(self.fabric, snode['switches'])) + self.module.fail_json( + msg="Fabric: {0} does not have the switch: {1}".format( + self.fabric, snode["switches"] + ) + ) switchsn = "" - if len(snode['switches']) == 2: + if len(snode["switches"]) == 2: switchsn = str(serial[0]) + "," + str(serial[1]) - if 'vPC' not in snode['attach_interface']: - self.module.fail_json(msg='Fabric: {} - if two switches are provided, vpc is only interface option' - .format(self.fabric)) 
- elif len(snode['switches']) == 1: + if "vPC" not in snode["attach_interface"]: + self.module.fail_json( + msg="Fabric: {0} - if two switches are provided, vpc is only interface option".format( + self.fabric + ) + ) + elif len(snode["switches"]) == 1: switchsn = str(serial[0]) - if 'vPC' in snode['attach_interface']: - self.module.fail_json(msg='Fabric: {} - For 1 switch, vpc is not the interface option' - .format(self.fabric)) + if "vPC" in snode["attach_interface"]: + self.module.fail_json( + msg="Fabric: {0} - For 1 switch, vpc is not the interface option".format( + self.fabric + ) + ) else: - self.module.fail_json(msg='Fabric: {} - Upto 2 switches only allowed' - .format(self.fabric)) + self.module.fail_json( + msg="Fabric: {0} - Upto 2 switches only allowed".format(self.fabric) + ) - if snode['type'] == 'firewall': - s_type = snode['type'].title() - elif snode['type'] == 'load_balancer': - s_type = 'ADC' - elif snode['type'] == 'virtual_network_function': - s_type = 'VNF' + if snode["type"] == "firewall": + s_type = snode["type"].title() + elif snode["type"] == "load_balancer": + s_type = "ADC" + elif snode["type"] == "virtual_network_function": + s_type = "VNF" snode_upd = { - "name": snode['name'], + "name": snode["name"], "type": s_type, - "formFactor": snode['form_factor'].title(), + "formFactor": snode["form_factor"].title(), "fabricName": self.service_fabric, - "interfaceName": snode['svc_int_name'], + "interfaceName": snode["svc_int_name"], "attachedSwitchSn": switchsn, - "attachedSwitchInterfaceName": snode['attach_interface'], + "attachedSwitchInterfaceName": snode["attach_interface"], "linkTemplateName": "service_link_trunk", "nvPairs": { "MTU": "jumbo", @@ -425,42 +436,44 @@ def update_create_params(self, snode): "ALLOWED_VLANS": "none", "BPDUGUARD_ENABLED": "no", "PORTTYPE_FAST_ENABLED": "true", - "ADMIN_STATE": "true" + "ADMIN_STATE": "true", }, - "attachedFabricName": self.fabric + "attachedFabricName": self.fabric, } return snode_upd def 
get_have(self): - method = 'GET' + method = "GET" path = self.paths["GET_SN_ATTACHED"].format(self.fabric) snode_objects = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(snode_objects, 'query_dcnm') + missing_fabric, not_ok = self.handle_response(snode_objects, "query_dcnm") if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Unable to Service Node under fabric: {}".format(self.fabric) + msg1 = "Fabric {0} not present on DCNM".format(self.fabric) + msg2 = "Unable to Service Node under fabric: {0}".format(self.fabric) self.module.fail_json(msg=msg1 if missing_fabric else msg2) return - if not snode_objects['DATA']: + if not snode_objects["DATA"]: return have_switch = [] - for snode in snode_objects['DATA']: + for snode in snode_objects["DATA"]: get_snode = {} - get_snode.update({'name': snode['name']}) - get_snode.update({'formFactor': snode['formFactor']}) - get_snode.update({'interfaceName': snode['interfaceName']}) - get_snode.update({'type': snode['type']}) - get_snode.update({'attachedFabricName': snode['attachedFabricName']}) - get_snode.update({'attachedSwitchInterfaceName': snode['attachedSwitchInterfaceName']}) - get_snode.update({'attachedSwitchSn': snode['attachedSwitchSn']}) - get_snode.update({'fabricName': snode['fabricName']}) + get_snode.update({"name": snode["name"]}) + get_snode.update({"formFactor": snode["formFactor"]}) + get_snode.update({"interfaceName": snode["interfaceName"]}) + get_snode.update({"type": snode["type"]}) + get_snode.update({"attachedFabricName": snode["attachedFabricName"]}) + get_snode.update( + {"attachedSwitchInterfaceName": snode["attachedSwitchInterfaceName"]} + ) + get_snode.update({"attachedSwitchSn": snode["attachedSwitchSn"]}) + get_snode.update({"fabricName": snode["fabricName"]}) have_switch.append(get_snode) self.have_create = have_switch @@ -484,13 +497,13 @@ def get_diff_delete(self): if self.config: for want_c in 
self.want_create: for have_c in self.have_create: - if (have_c['name'] == want_c['name']): - diff_delete.append(have_c['name']) + if have_c["name"] == want_c["name"]: + diff_delete.append(have_c["name"]) continue else: for have_c in self.have_create: - diff_delete.append(have_c['name']) + diff_delete.append(have_c["name"]) self.diff_delete = diff_delete @@ -516,7 +529,7 @@ def get_diff_replace(self): found = False for replace_c in self.diff_replace: for have_c in self.have_create: - if have_c['name'] == replace_c['name']: + if have_c["name"] == replace_c["name"]: found = True if not found: @@ -531,17 +544,21 @@ def get_diff_replace_delete(self): for have_c in self.have_create: match_found = False for want_c in self.want_create: - if want_c['name'] == have_c['name']: - if want_c['type'] == have_c['type'] and want_c['attachedFabricName'] == have_c['attachedFabricName'] \ - and want_c['fabricName'] == have_c['fabricName'] and \ - want_c['attachedSwitchInterfaceName'] == have_c['attachedSwitchInterfaceName'] and \ - want_c['attachedSwitchSn'] == have_c['attachedSwitchSn'] and \ - want_c['interfaceName'] == have_c['interfaceName']: + if want_c["name"] == have_c["name"]: + if ( + want_c["type"] == have_c["type"] + and want_c["attachedFabricName"] == have_c["attachedFabricName"] + and want_c["fabricName"] == have_c["fabricName"] + and want_c["attachedSwitchInterfaceName"] + == have_c["attachedSwitchInterfaceName"] + and want_c["attachedSwitchSn"] == have_c["attachedSwitchSn"] + and want_c["interfaceName"] == have_c["interfaceName"] + ): match_found = True if match_found: continue - else: - diff_delete.append(have_c['name']) + + diff_delete.append(have_c["name"]) self.diff_delete = diff_delete @@ -552,13 +569,17 @@ def get_diff_merge(self, replace=False): for want_c in self.want_create: found = False for have_c in self.have_create: - if want_c['name'] == have_c['name'] and want_c['type'] == have_c['type'] and \ - want_c['attachedFabricName'] == have_c['attachedFabricName'] 
and want_c['fabricName'] == have_c[ - 'fabricName'] and \ - want_c['attachedSwitchInterfaceName'] == have_c['attachedSwitchInterfaceName'] and \ - want_c['attachedSwitchSn'] == have_c['attachedSwitchSn'] and \ - want_c['interfaceName'] == have_c['interfaceName'] and \ - want_c['formFactor'] == have_c['formFactor']: + if ( + want_c["name"] == have_c["name"] + and want_c["type"] == have_c["type"] + and want_c["attachedFabricName"] == have_c["attachedFabricName"] + and want_c["fabricName"] == have_c["fabricName"] + and want_c["attachedSwitchInterfaceName"] + == have_c["attachedSwitchInterfaceName"] + and want_c["attachedSwitchSn"] == have_c["attachedSwitchSn"] + and want_c["interfaceName"] == have_c["interfaceName"] + and want_c["formFactor"] == have_c["formFactor"] + ): found = True if not found: diff_create.append(want_c) @@ -568,45 +589,45 @@ def get_diff_merge(self, replace=False): def get_diff_query(self): query = [] - method = 'GET' + method = "GET" path = self.paths["GET_SN_ATTACHED"].format(self.fabric) snode_objects = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(snode_objects, 'query_dcnm') + missing_fabric, not_ok = self.handle_response(snode_objects, "query_dcnm") if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Unable to find Service Node under fabric: {}".format(self.fabric) + msg1 = "Fabric {0} not present on DCNM".format(self.fabric) + msg2 = "Unable to find Service Node under fabric: {0}".format(self.fabric) self.module.fail_json(msg=msg1 if missing_fabric else msg2) return - if not snode_objects['DATA']: + if not snode_objects["DATA"]: return if self.config: for want_c in self.want_create: - for snode in snode_objects['DATA']: - if want_c['name'] == snode['name']: + for snode in snode_objects["DATA"]: + if want_c["name"] == snode["name"]: query.append(snode) continue else: - for snode in snode_objects['DATA']: + for snode in snode_objects["DATA"]: 
query.append(snode) self.query = query def push_to_remote(self, is_rollback=False): - method = 'DELETE' + method = "DELETE" if self.diff_delete: for name in self.diff_delete: delete_path = self.paths["GET_SN"].format(self.service_fabric, name) resp = dcnm_send(self.module, method, delete_path) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "delete") + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "delete") if fail: if is_rollback: @@ -614,13 +635,13 @@ def push_to_remote(self, is_rollback=False): return self.failure(resp) - method = 'POST' + method = "POST" if self.diff_create: for create in self.diff_create: deploy_path = self.paths["POST_SN"].format(self.service_fabric) resp = dcnm_send(self.module, method, deploy_path, json.dumps(create)) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "create") + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "create") if fail: if is_rollback: @@ -628,14 +649,16 @@ def push_to_remote(self, is_rollback=False): return self.failure(resp) - method = 'PUT' + method = "PUT" if self.diff_replace: for replace in self.diff_replace: - replace_path = self.paths["GET_SN"].format(self.service_fabric, replace['name']) + replace_path = self.paths["GET_SN"].format( + self.service_fabric, replace["name"] + ) resp = dcnm_send(self.module, method, replace_path, json.dumps(replace)) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "create") + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "create") if fail: if is_rollback: @@ -647,57 +670,72 @@ def validate_input(self): """Parse the playbook values, validate to param specs.""" - state = self.params['state'] + state = self.params["state"] - if state == 'query': + if state == "query": snode_spec = 
dict( - name=dict(required=True, type='str', length_max=64), + name=dict(required=True, type="str", length_max=64), ) if self.config: # Validate service node params - valid_snode, invalid_params = validate_list_of_dicts(self.config, snode_spec) + valid_snode, invalid_params = validate_list_of_dicts( + self.config, snode_spec + ) for snode in valid_snode: self.validated.append(snode) if invalid_params: - msg = 'Invalid parameters in playbook: {}'.format('\n'.join(invalid_params)) + msg = "Invalid parameters in playbook: {0}".format( + "\n".join(invalid_params) + ) self.module.fail_json(msg=msg) else: snode_spec = dict( - name=dict(required=True, type='str', length_max=64), - type=dict(required=True, type='str', - choices=['firewall', 'load_balancer', 'virtual_network_function'], - default='firewall'), - form_factor=dict(required=True, type='str', - choices=['physical', 'virtual'], - default='physical'), - svc_int_name=dict(required=True, type='str', length_max=64), - switches=dict(required=True, type='list'), - attach_interface=dict(required=True, type='str'), + name=dict(required=True, type="str", length_max=64), + type=dict( + required=True, + type="str", + choices=["firewall", "load_balancer", "virtual_network_function"], + default="firewall", + ), + form_factor=dict( + required=True, + type="str", + choices=["physical", "virtual"], + default="physical", + ), + svc_int_name=dict(required=True, type="str", length_max=64), + switches=dict(required=True, type="list"), + attach_interface=dict(required=True, type="str"), ) if self.config: msg = None # Validate service node params - valid_snode, invalid_params = validate_list_of_dicts(self.config, snode_spec) + valid_snode, invalid_params = validate_list_of_dicts( + self.config, snode_spec + ) for snode in valid_snode: self.validated.append(snode) if invalid_params: - msg = 'Invalid parameters in playbook: {}'.format('\n'.join(invalid_params)) + msg = "Invalid parameters in playbook: {0}".format( + 
"\n".join(invalid_params) + ) self.module.fail_json(msg=msg) else: - state = self.params['state'] + state = self.params["state"] msg = None - if state == 'merged' or state == 'overridden' or \ - state == 'replaced': - msg = "config: element is mandatory for this state {}".format(state) + if state == "merged" or state == "overridden" or state == "replaced": + msg = "config: element is mandatory for this state {0}".format( + state + ) if msg: self.module.fail_json(msg=msg) @@ -709,22 +747,26 @@ def handle_response(self, resp, op): res = resp.copy() - if op == 'query_dcnm': + if op == "query_dcnm": # This if blocks handles responses to the query APIs against DCNM. # Basically all GET operations. # - if res.get('ERROR') == 'Not Found' and res['RETURN_CODE'] == 404: + if res.get("ERROR") == "Not Found" and res["RETURN_CODE"] == 404: return True, False # DCNM version 11.5 returns MESSAGE as "" on success whereas version 12 returns MESSAGE # as "OK" on success. Check for "" and "OK" when RETURN_CODE is 200. - if res['RETURN_CODE'] != 200 or (res['MESSAGE'] != "" and res['MESSAGE'] != "OK"): + if res["RETURN_CODE"] != 200 or ( + res["MESSAGE"] != "" and res["MESSAGE"] != "OK" + ): return False, True return False, False # Responses to all other operations POST and PUT are handled here. # DCNM version 11.5 returns MESSAGE as "" on success whereas version 12 returns MESSAGE # as "OK" on success. Check for "" and "OK" when RETURN_CODE is 200. 
- if (res.get('MESSAGE') != "" or res.get('MESSAGE') != "OK") and res.get('RETURN_CODE') != 200: + if (res.get("MESSAGE") != "" or res.get("MESSAGE") != "OK") and res.get( + "RETURN_CODE" + ) != 200: fail = True changed = False return fail, changed @@ -746,16 +788,18 @@ def failure(self, resp): if self.failed_to_rollback: msg1 = "FAILED - Attempted rollback of the task has failed, may need manual intervention" else: - msg1 = 'SUCCESS - Attempted rollback of the task has succeeded' + msg1 = "SUCCESS - Attempted rollback of the task has succeeded" res = copy.deepcopy(resp) - res.update({'ROLLBACK_RESULT': msg1}) + res.update({"ROLLBACK_RESULT": msg1}) - if not resp.get('DATA'): - data = copy.deepcopy(resp.get('DATA')) - if data.get('stackTrace'): - data.update({'stackTrace': 'Stack trace is hidden, use \'-vvvvv\' to print it'}) - res.update({'DATA': data}) + if not resp.get("DATA"): + data = copy.deepcopy(resp.get("DATA")) + if data.get("stackTrace"): + data.update( + {"stackTrace": "Stack trace is hidden, use '-vvvvv' to print it"} + ) + res.update({"DATA": data}) if self.module._verbosity >= 5: self.module.fail_json(msg=res) @@ -764,52 +808,56 @@ def failure(self, resp): def main(): - """ main entry point for module execution - """ + """main entry point for module execution""" element_spec = dict( - fabric=dict(required=True, type='str'), - service_fabric=dict(required=True, type='str'), - config=dict(required=False, type='list', elements='dict'), - state=dict(default='merged', - choices=['merged', 'replaced', 'deleted', 'overridden', 'query']), + fabric=dict(required=True, type="str"), + service_fabric=dict(required=True, type="str"), + config=dict(required=False, type="list", elements="dict"), + state=dict( + default="merged", + choices=["merged", "replaced", "deleted", "overridden", "query"], + ), ) - module = AnsibleModule(argument_spec=element_spec, - supports_check_mode=True) + module = AnsibleModule(argument_spec=element_spec, supports_check_mode=True) 
dcnm_snode = DcnmServiceNode(module) if not dcnm_snode.ip_sn: - module.fail_json(msg="Fabric {} missing on DCNM or does not have any switches".format(dcnm_snode.fabric)) + module.fail_json( + msg="Fabric {0} missing on DCNM or does not have any switches".format( + dcnm_snode.fabric + ) + ) dcnm_snode.validate_input() dcnm_snode.get_want() dcnm_snode.get_have() - if module.params['state'] == 'merged': + if module.params["state"] == "merged": dcnm_snode.get_diff_merge() - if module.params['state'] == 'replaced': + if module.params["state"] == "replaced": dcnm_snode.get_diff_replace() - if module.params['state'] == 'overridden': + if module.params["state"] == "overridden": dcnm_snode.get_diff_override() - if module.params['state'] == 'deleted': + if module.params["state"] == "deleted": dcnm_snode.get_diff_delete() # - if module.params['state'] == 'query': + if module.params["state"] == "query": dcnm_snode.get_diff_query() - dcnm_snode.result['response'] = dcnm_snode.query + dcnm_snode.result["response"] = dcnm_snode.query if module.check_mode: - dcnm_snode.result['changed'] = False + dcnm_snode.result["changed"] = False module.exit_json(**dcnm_snode.result) if dcnm_snode.diff_create or dcnm_snode.diff_delete or dcnm_snode.diff_replace: - dcnm_snode.result['changed'] = True + dcnm_snode.result["changed"] = True else: module.exit_json(**dcnm_snode.result) @@ -818,5 +866,5 @@ def main(): module.exit_json(**dcnm_snode.result) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/plugins/modules/dcnm_service_policy.py b/plugins/modules/dcnm_service_policy.py index 5fc411df5..41c783963 100644 --- a/plugins/modules/dcnm_service_policy.py +++ b/plugins/modules/dcnm_service_policy.py @@ -1,6 +1,6 @@ #!/usr/bin/python # -# Copyright (c) 2021 Cisco and/or its affiliates. +# Copyright (c) 2021-2022 Cisco and/or its affiliates. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import absolute_import, division, print_function +__metaclass__ = type __author__ = "Mallik Mudigonda" -DOCUMENTATION = ''' +DOCUMENTATION = """ --- module: dcnm_service_policy short_description: DCNM ansible module for managing service policies. @@ -126,13 +128,13 @@ - source port number to be matched to apply this ACL type: str required: true - choices: [any, Min:1, Max:65535] + choices: [any, Min 1, Max 65535] dest_port: description: - destination port number to be matched to apply this ACL type: str required: true - choices: [any, Min:1, Max:65535] + choices: [any, Min 1, Max 65535] action: description: - action to apply for traffic matching the service profile @@ -165,17 +167,17 @@ type: int required: false default: will be auto-generated by DCNM - choices: [Min:1, Max:65535)] + choices: [Min 1, Max 65535)] rev_route_map_num: description: - route map match number for reverse direction type: int required: false default: will be auto-generated by DCNM - choices: [Min:1, Max:65535)] -''' + choices: [Min 1, Max 65535)] +""" -EXAMPLES = ''' +EXAMPLES = """ # L4-L7 Service Insertion: # # Cisco DCNM has the ability to insert Layer 4-Layer 7 (L4-L7) service devices in a data center fabric, and also enables selectively @@ -434,7 +436,7 @@ config: - node_name: it-sn-1 -''' +""" import time import json @@ -445,7 +447,7 @@ dcnm_send, validate_list_of_dicts, dcnm_reset_connection, - dcnm_version_supported + dcnm_version_supported, ) from datetime import datetime @@ -455,34 +457,35 @@ class DcnmServicePolicy: dcnm_sp_paths = { 11: { - "GET_SNODE_WITH_NAME": 
"/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}", - "GET_SP_INFO_WITH_SN": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/policies/{}", - "GET_SERVICE_NODES": "/appcenter/Cisco/elasticservice/elasticservice-api/?attached-fabric={}", - "GET_SP_INFO": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/policies/{}/{}", - "GET_DEPLOY_STATUS": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/policies/{}", - "CREATE_SP": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/policies", - "UPDATE_SP": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/policies/{}/{}", - "SP_DETACH_SUFFIX": "/attachments?policy-names=", - "SP_DELETE": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/policies/{}/{}", - "SP_DEPLOY_SUFFIX": "/deployments", - "SP_CFG_SAVE_DEPLOY": "/rest/control/fabrics/{}/config-deploy", - "SP_PREFIX": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/policies/{}" - }, + "GET_SNODE_WITH_NAME": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}", + "GET_SP_INFO_WITH_SN": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/policies/{}", + "GET_SERVICE_NODES": "/appcenter/Cisco/elasticservice/elasticservice-api/?attached-fabric={}", + "GET_SP_INFO": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/policies/{}/{}", + "GET_DEPLOY_STATUS": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/policies/{}", + "CREATE_SP": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/policies", + "UPDATE_SP": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/policies/{}/{}", + "SP_DETACH_SUFFIX": "/attachments?policy-names=", + "SP_DELETE": 
"/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/policies/{}/{}", + "SP_DEPLOY_SUFFIX": "/deployments", + "SP_CFG_SAVE_DEPLOY": "/rest/control/fabrics/{}/config-deploy", + "SP_PREFIX": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/policies/{}", + }, 12: { - "GET_SNODE_WITH_NAME": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}", - "GET_SP_INFO_WITH_SN": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/policies/{}", - "GET_SERVICE_NODES": "/appcenter/cisco/ndfc/api/v1/elastic-service/service-nodes?attached-fabric={}", - "GET_SP_INFO": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/policies/{}/{}", - "GET_DEPLOY_STATUS": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/policies/{}", - "CREATE_SP": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/policies", - "UPDATE_SP": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/policies/{}/{}", - "SP_DETACH_SUFFIX": "/attachments?policy-names=", - "SP_DELETE": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/policies/{}/{}", - "SP_DEPLOY_SUFFIX": "/deployments", - "SP_CFG_SAVE_DEPLOY": "/rest/control/fabrics/{}/config-deploy", - "SP_PREFIX": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/policies/{}" - } + "GET_SNODE_WITH_NAME": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}", + "GET_SP_INFO_WITH_SN": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/policies/{}", + "GET_SERVICE_NODES": "/appcenter/cisco/ndfc/api/v1/elastic-service/service-nodes?attached-fabric={}", + "GET_SP_INFO": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/policies/{}/{}", + "GET_DEPLOY_STATUS": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/policies/{}", + "CREATE_SP": 
"/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/policies", + "UPDATE_SP": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/policies/{}/{}", + "SP_DETACH_SUFFIX": "/attachments?policy-names=", + "SP_DELETE": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/policies/{}/{}", + "SP_DEPLOY_SUFFIX": "/deployments", + "SP_CFG_SAVE_DEPLOY": "/rest/control/fabrics/{}/config-deploy", + "SP_PREFIX": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/policies/{}", + }, } + def __init__(self, module): self.module = module self.params = module.params @@ -541,14 +544,14 @@ def dcnm_sp_validate_and_build_sp_info(self, cfg, sp_spec, sp_policy_spec): sp_info, invalid_params = validate_list_of_dicts(cfg, sp_spec) if invalid_params: if cfg[0].get("name", " ") != " ": - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing Service Policy - " + cfg[0]["name"] + ", " + "\n".join(invalid_params) ) else: - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing Service Policy - Unknown, " + "\n".join(invalid_params) ) @@ -565,7 +568,7 @@ def dcnm_sp_validate_and_build_sp_info(self, cfg, sp_spec, sp_policy_spec): pol_list, sp_policy_spec ) if invalid_params: - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing policy under Service Policy - " + cfg[0]["name"] + ", " @@ -599,11 +602,7 @@ def dcnm_sp_validate_input(self): cfg.append(citem) if self.module.params["state"] == "deleted": - if ( - item.get("name") - or item.get("node_name") - or item.get("rp_name") - ): + if item.get("name") or item.get("node_name") or item.get("rp_name"): # config for delete state is different. 
So validate deleted state differently self.dcnm_sp_validate_delete_state_input(cfg) elif self.module.params["state"] == "query": @@ -641,15 +640,13 @@ def dcnm_sp_validate_sp_input(self, cfg): src_network=dict(required=True, type="str"), dest_network=dict(required=True, type="str"), next_hop=dict(required=True, type="ipv4"), - reverse_next_hop=dict(type="ipv4", default=''), + reverse_next_hop=dict(type="ipv4", default=""), reverse=dict(required=False, type=bool, default=True), policy=dict(required=True, type="dict"), ) sp_policy_spec = dict( - proto=dict( - required=True, type="str", choices=["ip", "icmp", "tcp", "udp"] - ), + proto=dict(required=True, type="str", choices=["ip", "icmp", "tcp", "udp"]), src_port=dict(required=True, type="str"), dest_port=dict(required=True, type="str"), action=dict( @@ -695,14 +692,14 @@ def dcnm_sp_validate_delete_state_input(self, cfg): sp_info, invalid_params = validate_list_of_dicts(cfg, sp_delete_spec) if invalid_params: if cfg[0].get("name", " ") != " ": - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing Service Policy - " + cfg[0]["name"] + ", " + "".join(invalid_params) ) else: - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing Service Policy - Unknown, " + "".join(invalid_params) ) @@ -731,7 +728,7 @@ def dcnm_sp_validate_query_state_input(self, cfg): sp_info, invalid_params = validate_list_of_dicts(cfg, sp_query_spec) if invalid_params: - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing Service Policy - " + cfg[0]["name"] + ", " @@ -743,7 +740,9 @@ def dcnm_sp_validate_query_state_input(self, cfg): def dcnm_sp_get_service_node_type(self, sp): - path = self.paths["GET_SNODE_WITH_NAME"].format(self.module.params["service_fabric"], sp["node_name"]) + path = self.paths["GET_SNODE_WITH_NAME"].format( + 
self.module.params["service_fabric"], sp["node_name"] + ) retries = 0 while retries <= 30: @@ -824,22 +823,18 @@ def dcnm_sp_get_sp_payload(self, sp): ) if sp["policy"][0]["rev_route_map_num"] != 0: if sp["reverse"]: - sp_payload["nvPairs"][ - "ROUTE_MAP_MATCH_SEQUENCE_NUMBER_REVERSE" - ] = str(sp["policy"][0]["rev_route_map_num"]) + sp_payload["nvPairs"]["ROUTE_MAP_MATCH_SEQUENCE_NUMBER_REVERSE"] = str( + sp["policy"][0]["rev_route_map_num"] + ) return sp_payload def dcnm_sp_update_policy_information(self, want, have, cfg): if cfg["policy"].get("action", None) is None: - want["nvPairs"]["ROUTE_MAP_ACTION"] = have["nvPairs"][ - "ROUTE_MAP_ACTION" - ] + want["nvPairs"]["ROUTE_MAP_ACTION"] = have["nvPairs"]["ROUTE_MAP_ACTION"] if cfg["policy"].get("next_hop_option", None) is None: - want["nvPairs"]["NEXT_HOP_OPTION"] = have["nvPairs"][ - "NEXT_HOP_OPTION" - ] + want["nvPairs"]["NEXT_HOP_OPTION"] = have["nvPairs"]["NEXT_HOP_OPTION"] def dcnm_sp_update_want(self): @@ -876,9 +871,7 @@ def dcnm_sp_update_want(self): and (sp["peeringName"] == have["peeringName"]) and (sp["fabricName"] == have["fabricName"]) and (sp["serviceNodeName"] == have["serviceNodeName"]) - and ( - sp["attachedFabricName"] == have["attachedFabricName"] - ) + and (sp["attachedFabricName"] == have["attachedFabricName"]) ) ] if match_have == []: @@ -891,23 +884,15 @@ def dcnm_sp_update_want(self): if ( (sp["policyName"] == cfg["name"]) and (sp["peeringName"] == cfg["rp_name"]) - and ( - sp["fabricName"] - == self.module.params["service_fabric"] - ) + and (sp["fabricName"] == self.module.params["service_fabric"]) and (sp["serviceNodeName"] == cfg["node_name"]) - and ( - sp["attachedFabricName"] - == self.module.params["fabric"] - ) + and (sp["attachedFabricName"] == self.module.params["fabric"]) ) ] if match_cfg == []: continue - self.dcnm_sp_update_policy_information( - sp, match_have[0], match_cfg[0] - ) + self.dcnm_sp_update_policy_information(sp, match_have[0], match_cfg[0]) def 
dcnm_sp_get_want(self): @@ -952,7 +937,11 @@ def dcnm_sp_get_sp_info_with_service_node(self, node_name): resp["DATA"] (dict): All service policies present on the specified service node """ - path = self.paths["GET_SP_INFO_WITH_SN"].format(self.module.params["service_fabric"], node_name, self.module.params["fabric"]) + path = self.paths["GET_SP_INFO_WITH_SN"].format( + self.module.params["service_fabric"], + node_name, + self.module.params["fabric"], + ) retries = 0 while retries < 30: @@ -963,8 +952,8 @@ def dcnm_sp_get_sp_info_with_service_node(self, node_name): self.dcnm_sp_check_for_errors_in_resp(resp) time.sleep(10) continue - else: - break + + break if resp and (resp["RETURN_CODE"] == 200) and resp["DATA"]: resp["RETRIES"] = retries @@ -996,8 +985,8 @@ def dcnm_sp_get_service_nodes_from_dcnm(self): self.dcnm_sp_check_for_errors_in_resp(resp) time.sleep(10) continue - else: - break + + break if resp and (resp["RETURN_CODE"] == 200) and resp["DATA"]: resp["RETRIES"] = retries @@ -1021,9 +1010,19 @@ def dcnm_sp_get_sp_info_from_dcnm(self, sp, sp_type): """ if sp_type == "PAYLOAD": - path = self.paths["GET_SP_INFO"].format(sp["fabricName"], sp["serviceNodeName"], sp["attachedFabricName"], sp["policyName"]) + path = self.paths["GET_SP_INFO"].format( + sp["fabricName"], + sp["serviceNodeName"], + sp["attachedFabricName"], + sp["policyName"], + ) else: - path = self.paths["GET_SP_INFO"].format(self.module.params["service_fabric"], sp["node_name"], self.module.params["fabric"], sp["name"]) + path = self.paths["GET_SP_INFO"].format( + self.module.params["service_fabric"], + sp["node_name"], + self.module.params["fabric"], + sp["name"], + ) resource_not_found = False retries = 0 @@ -1047,8 +1046,8 @@ def dcnm_sp_get_sp_info_from_dcnm(self, sp, sp_type): self.dcnm_sp_check_for_errors_in_resp(resp) time.sleep(10) continue - else: - break + + break if resp and (resp["RETURN_CODE"] == 200) and resp["DATA"]: resp["RETRIES"] = retries @@ -1106,7 +1105,9 @@ def 
dcnm_sp_get_sp_deployment_status(self, sp, refresh): self.have_all[key] = [] # Get all policies and filter out the specific policy to check for attachment details. - path = self.paths["GET_DEPLOY_STATUS"].format(sp["fabricName"], sp["serviceNodeName"], sp["attachedFabricName"]) + path = self.paths["GET_DEPLOY_STATUS"].format( + sp["fabricName"], sp["serviceNodeName"], sp["attachedFabricName"] + ) retries = 0 while retries < 30: @@ -1117,18 +1118,17 @@ def dcnm_sp_get_sp_deployment_status(self, sp, refresh): self.dcnm_sp_check_for_errors_in_resp(resp) time.sleep(10) continue - elif resp["RETURN_CODE"] == 200 and resp.get("DATA") == []: + + if resp["RETURN_CODE"] == 200 and resp.get("DATA") == []: time.sleep(10) continue - else: - break + + break if resp and (resp["RETURN_CODE"] == 200) and resp["DATA"]: self.have_all[key].extend(resp["DATA"]) else: - self.changed_dict[0]["debugs"].append( - {"GET_SP_ATT_STATUS": resp} - ) + self.changed_dict[0]["debugs"].append({"GET_SP_ATT_STATUS": resp}) resp["CHANGED"] = self.changed_dict[0] self.module.fail_json(msg=resp) @@ -1148,8 +1148,10 @@ def dcnm_sp_get_sp_deployment_status(self, sp, refresh): deployed = False retry = False if match_pol: - att_status = {"GET_SP_ATT_STATUS": match_pol[0]["status"], - "PolicyName": match_pol[0]["policyName"]} + att_status = { + "GET_SP_ATT_STATUS": match_pol[0]["status"], + "PolicyName": match_pol[0]["policyName"], + } if att_status not in self.changed_dict[0]["debugs"]: self.changed_dict[0]["debugs"].append(att_status) @@ -1159,18 +1161,14 @@ def dcnm_sp_get_sp_deployment_status(self, sp, refresh): return resp, False, False, match_pol[0]["status"].lower() elif match_pol[0]["status"].lower() == "success": pol_info = {} - pol_info = self.dcnm_sp_combine_policies( - sp, pol_info - ) + pol_info = self.dcnm_sp_combine_policies(sp, pol_info) for path in pol_info: self.dcnm_sp_deploy_sp(path, pol_info[path]) return resp, True, False, "success" elif match_pol[0]["status"].lower() == "out-of-sync": 
pol_info = {} - pol_info = self.dcnm_sp_combine_policies( - sp, pol_info - ) + pol_info = self.dcnm_sp_combine_policies(sp, pol_info) for path in pol_info: self.dcnm_sp_deploy_sp(path, pol_info[path]) @@ -1183,7 +1181,10 @@ def dcnm_sp_get_sp_deployment_status(self, sp, refresh): return resp, False, True, match_pol[0]["status"].lower() else: self.changed_dict[0]["debugs"].append( - {"GET_SP_ATT_STATUS": "No Matching Policy", "PolicyName": sp["policyName"]} + { + "GET_SP_ATT_STATUS": "No Matching Policy", + "PolicyName": sp["policyName"], + } ) return resp, False, False, None @@ -1217,10 +1218,7 @@ def dcnm_sp_compare_policy_info(self, want, have): mismatch_reasons.append("DCNM_SP_SRC_NETWORK_MISMATCH") if want["destinationNetwork"] != have["destinationNetwork"]: mismatch_reasons.append("DCNM_SP_DST_NETWORK_MISMATCH") - if ( - str(want["reverseEnabled"]).lower() - != str(have["reverseEnabled"]).lower() - ): + if str(want["reverseEnabled"]).lower() != str(have["reverseEnabled"]).lower(): mismatch_reasons.append("DCNM_SP_REV_ENABLED_MISMATCH") if want["policyTemplateName"] != have["policyTemplateName"]: mismatch_reasons.append("DCNM_SP_POL_TEMPLATE_MISMATCH") @@ -1239,13 +1237,9 @@ def dcnm_sp_compare_policy_info(self, want, have): if key == "REVERSE": - if ( - str(have["nvPairs"]["REVERSE"]).lower() == "true" - ): + if str(have["nvPairs"]["REVERSE"]).lower() == "true": have["nvPairs"]["REVERSE"] = True - if ( - str(have["nvPairs"]["REVERSE"]).lower() == "false" - ): + if str(have["nvPairs"]["REVERSE"]).lower() == "false": have["nvPairs"]["REVERSE"] = False if want["nvPairs"][key] != have["nvPairs"].get(key, None): mismatch_reasons.append("DCNM_SP_" + key + "_MISMATCH") @@ -1330,7 +1324,9 @@ def dcnm_sp_get_diff_merge(self): elif rc == "DCNM_SRP_MERGE": # A sp exists and it needs to be updated self.changed_dict[0]["modified"].append(sp) - self.changed_dict[0]["debugs"].append({"Policy": sp["policyName"], "REASONS": reasons}) + self.changed_dict[0]["debugs"].append( + 
{"Policy": sp["policyName"], "REASONS": reasons} + ) self.diff_modify.append(sp) # Check the 'deploy' flag and decide if this sp is to be deployed @@ -1349,9 +1345,12 @@ def dcnm_sp_get_diff_merge(self): refresh = False while retries < 30: retries += 1 - resp, retry, deployed, status = self.dcnm_sp_get_sp_deployment_status( - have, refresh - ) + ( + resp, + retry, + deployed, + status, + ) = self.dcnm_sp_get_sp_deployment_status(have, refresh) # Sometimes due to timing a policy may return "Out-of-Sync" status. Retry a few times to make sure # that it is really out of sync. @@ -1365,9 +1364,9 @@ def dcnm_sp_get_diff_merge(self): # We deploy when self.deploy is True and: # 1. there are no changes due to this request(rc is DCNM_SRP_DONT_ADD), but the SRP is not deployed # 2. there are changes due to this request (rc is DCNM_SRP_MERGE) - if ( - (rc == "DCNM_SRP_DONT_ADD") and (deployed is False) - ) or (rc == "DCNM_SRP_MERGE"): + if ((rc == "DCNM_SRP_DONT_ADD") and (deployed is False)) or ( + rc == "DCNM_SRP_MERGE" + ): ditem = {} ditem["serviceNodeName"] = sp["serviceNodeName"] ditem["attachedFabricName"] = sp["attachedFabricName"] @@ -1426,15 +1425,11 @@ def dcnm_sp_get_diff_deleted(self): continue # Policy name is not given. Get all policies based on Service Node. 
If RP # name is given then filter the list based on that - sps = self.dcnm_sp_get_sp_info_with_service_node( - snode["node_name"] - ) + sps = self.dcnm_sp_get_sp_info_with_service_node(snode["node_name"]) # From all the policies filter out the ones that have a matching RP if snode.get("rp_name") != "": match_sps = [ - sp - for sp in sps - if (sp["peeringName"] == snode["rp_name"]) + sp for sp in sps if (sp["peeringName"] == snode["rp_name"]) ] else: match_sps = sps @@ -1476,9 +1471,7 @@ def dcnm_sp_get_diff_query(self): self.result["response"].append(resp) else: # policy name is not included - resp = self.dcnm_sp_get_sp_info_with_service_node( - sp["node_name"] - ) + resp = self.dcnm_sp_get_sp_info_with_service_node(sp["node_name"]) if resp != []: self.result["response"].extend(resp) @@ -1523,9 +1516,7 @@ def dcnm_sp_get_diff_overridden(self): and (sp["peeringName"] == want["peeringName"]) and (sp["fabricName"] == want["fabricName"]) and (sp["serviceNodeName"] == want["serviceNodeName"]) - and ( - sp["attachedFabricName"] == want["attachedFabricName"] - ) + and (sp["attachedFabricName"] == want["attachedFabricName"]) ) ] if match_want == []: @@ -1552,9 +1543,16 @@ def dcnm_sp_create_sp(self, sp, command): """ if command == "POST": - path = self.paths["CREATE_SP"].format(sp["fabricName"], sp["serviceNodeName"]) + path = self.paths["CREATE_SP"].format( + sp["fabricName"], sp["serviceNodeName"] + ) else: - path = self.paths["UPDATE_SP"].format(sp["fabricName"], sp["serviceNodeName"], sp["attachedFabricName"], sp["policyName"]) + path = self.paths["UPDATE_SP"].format( + sp["fabricName"], + sp["serviceNodeName"], + sp["attachedFabricName"], + sp["policyName"], + ) json_payload = json.dumps(sp) @@ -1594,7 +1592,12 @@ def dcnm_sp_delete_sp(self, sp): resp (dict): Response from DCNM server """ - path = self.paths["SP_DELETE"].format(sp["fabricName"], sp["serviceNodeName"], sp["attachedFabricName"], sp["policyName"]) + path = self.paths["SP_DELETE"].format( + 
sp["fabricName"], + sp["serviceNodeName"], + sp["attachedFabricName"], + sp["policyName"], + ) # Delete the service policy json_payload = json.dumps(sp) @@ -1661,7 +1664,12 @@ def dcnm_sp_check_deployment_status(self, sp_list, final_state): att_state = "Unknown" while retries < 50: retries += 1 - resp, retry, deployed, att_state = self.dcnm_sp_get_sp_deployment_status(sp, True) + ( + resp, + retry, + deployed, + att_state, + ) = self.dcnm_sp_get_sp_deployment_status(sp, True) if att_state == final_state: break @@ -1676,7 +1684,15 @@ def dcnm_sp_check_deployment_status(self, sp_list, final_state): time.sleep(30) # After all retries, if the SP did not move to 'final_state' it is an error if att_state != final_state: - self.module.fail_json (msg={"CHANGED": self.changed_dict[0], "FAILURE REASON": "SP "+ sp["policyName"] +" did not reach 'In-Sync' State", "Attach State" : att_state}) + self.module.fail_json( + msg={ + "CHANGED": self.changed_dict[0], + "FAILURE REASON": "SP " + + sp["policyName"] + + " did not reach 'In-Sync' State", + "Attach State": att_state, + } + ) def dcnm_sp_combine_policies(self, sp, pol_info): @@ -1693,7 +1709,9 @@ def dcnm_sp_combine_policies(self, sp, pol_info): pol_info(dict): A dict containing a list of combined policies including the current one """ - path = self.paths["SP_PREFIX"].format(sp["fabricName"], sp["serviceNodeName"], sp["attachedFabricName"]) + path = self.paths["SP_PREFIX"].format( + sp["fabricName"], sp["serviceNodeName"], sp["attachedFabricName"] + ) if pol_info.get(path) is None: pol_info[path] = {"policyNames": []} @@ -1732,12 +1750,8 @@ def dcnm_sp_check_for_errors_in_resp(self, resp): and resp["DATA"]["error"].get("code") == "InvalidRequest" ): if ( - "not allowed" - not in resp["DATA"]["error"].get("detail", "") - ) and ( - "Deployment" - not in resp["DATA"]["error"].get("detail", "") - ): + "not allowed" not in resp["DATA"]["error"].get("detail", "") + ) and ("Deployment" not in resp["DATA"]["error"].get("detail", 
"")): # For the case of "InvalidRequest", check if it is because of deployment operation. If not, we should # reset the connection because the token may have expired in the middle of transaction. dcnm_reset_connection(self.module) @@ -1824,9 +1838,7 @@ def dcnm_sp_send_message_to_dcnm(self): if self.diff_delete: delete_pol_info = {} for sp in self.diff_delete: - delete_pol_info = self.dcnm_sp_combine_policies( - sp, delete_pol_info - ) + delete_pol_info = self.dcnm_sp_combine_policies(sp, delete_pol_info) else: delete_pol_info = {} @@ -1897,7 +1909,7 @@ def dcnm_sp_send_message_to_dcnm(self): self.module.fail_json(msg=resp) if delete_flag: - self.dcnm_sp_check_deployment_status (self.diff_delete, "na") + self.dcnm_sp_check_deployment_status(self.diff_delete, "na") # All policies are detached and deployed. Now go ahead and delete the same from the server for sp in self.diff_delete: @@ -1924,9 +1936,7 @@ def dcnm_sp_send_message_to_dcnm(self): if self.diff_deploy: deploy_pol_info = {} for sp in self.diff_deploy: - deploy_pol_info = self.dcnm_sp_combine_policies( - sp, deploy_pol_info - ) + deploy_pol_info = self.dcnm_sp_combine_policies(sp, deploy_pol_info) else: deploy_pol_info = {} @@ -1962,7 +1972,7 @@ def dcnm_sp_send_message_to_dcnm(self): if deploy_flag: # Ensure all the route peerings are properly deployed before returning. 
- self.dcnm_sp_check_deployment_status (self.diff_deploy, "in-sync") + self.dcnm_sp_check_deployment_status(self.diff_deploy, "in-sync") self.result["changed"] = ( create_flag or modify_flag or delete_flag or deploy_flag @@ -1971,24 +1981,21 @@ def dcnm_sp_send_message_to_dcnm(self): def main(): - """ main entry point for module execution - """ + """main entry point for module execution""" element_spec = dict( fabric=dict(required=True, type="str"), service_fabric=dict(required=True, type="str"), - config=dict(required=False, type="list"), + config=dict(required=False, type="list", elements="dict"), state=dict( type="str", default="merged", choices=["merged", "deleted", "replaced", "query", "overridden"], ), deploy=dict(required=False, type="bool", default=True), - attach=dict(required=False, type="bool"), + attach=dict(required=False, type="bool", default=True), ) - module = AnsibleModule( - argument_spec=element_spec, supports_check_mode=True - ) + module = AnsibleModule(argument_spec=element_spec, supports_check_mode=True) dcnm_sp = DcnmServicePolicy(module) @@ -2008,16 +2015,14 @@ def main(): if not dcnm_sp.config: if state == "merged" or state == "replaced" or state == "query": module.fail_json( - msg="'config' element is mandatory for state '{}', given = '{}'".format( + msg="'config' element is mandatory for state '{0}', given = '{1}'".format( state, dcnm_sp.config ) ) dcnm_sp.dcnm_sp_validate_input() - if (module.params["state"] != "query") and ( - module.params["state"] != "deleted" - ): + if (module.params["state"] != "query") and (module.params["state"] != "deleted"): dcnm_sp.dcnm_sp_get_want() dcnm_sp.dcnm_sp_get_have() @@ -2028,9 +2033,7 @@ def main(): # they must be purged or defaulted. 
dcnm_sp.dcnm_sp_update_want() - if (module.params["state"] == "merged") or ( - module.params["state"] == "replaced" - ): + if (module.params["state"] == "merged") or (module.params["state"] == "replaced"): dcnm_sp.dcnm_sp_get_diff_merge() if module.params["state"] == "deleted": diff --git a/plugins/modules/dcnm_service_route_peering.py b/plugins/modules/dcnm_service_route_peering.py index 5c4681aa2..73ff6b13e 100644 --- a/plugins/modules/dcnm_service_route_peering.py +++ b/plugins/modules/dcnm_service_route_peering.py @@ -1,6 +1,6 @@ #!/usr/bin/python # -# Copyright (c) 2021 Cisco and/or its affiliates. +# Copyright (c) 2021-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import absolute_import, division, print_function +__metaclass__ = type __author__ = "Mallik Mudigonda" -DOCUMENTATION = ''' +DOCUMENTATION = """ --- module: dcnm_service_route_peering short_description: DCNM Ansible Module for managing Service Route Peerings. 
@@ -625,9 +627,9 @@ type: int required: false default: 12345 -''' +""" -EXAMPLES = ''' +EXAMPLES = """ # L4-L7 Service Insertion: # ======================= # @@ -1156,9 +1158,8 @@ service_fabric: external config: node_name: IT-SN-1 # mandatory - node_name: IT-SN-2 # mandatory -''' +""" import re import time @@ -1170,48 +1171,49 @@ dcnm_send, validate_list_of_dicts, dcnm_reset_connection, - dcnm_version_supported + dcnm_version_supported, ) from datetime import datetime + # Route Peering Class object which includes all the required methods and data to configure and maintain Roue peering objects class DcnmServiceRoutePeering: dcnm_srp_paths = { 11: { - "ALLOC_VLAN": "/rest/resource-manager/vlan/{}?vlanUsageType=SERVICE_NETWORK_VLAN", - "GET_SRP_WITH_SN": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings/{}", - "GET_SNODES_FROM_DCNM": "/appcenter/Cisco/elasticservice/elasticservice-api/?attached-fabric={}", - "GET_SRP_INFO_FROM_DCNM": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings/{}/{}", - "GET_SRP_DEPLOY_STATUS": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings/{}/{}/attachments", - "CREATE_SRP": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings", - "UPDATE_SRP": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings/{}/{}", - "DELETE_SRP": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings/{}/{}", - "ATTACH_SRP": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings/{}/attachments", - "DETACH_SRP_SUFFIX": "/attachments?peering-names=", - "DEPLOY_SRP_PREFIX": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings/{}", - "DEPLOY_SRP_SUFFIX": "/deployments", - "SRP_FIXED_PREFIX": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings/{}", 
- "SRP_CFG_SAVE_AND_DEPLOY": "/rest/control/fabrics/{}/config-deploy" - - }, + "ALLOC_VLAN": "/rest/resource-manager/vlan/{}?vlanUsageType=SERVICE_NETWORK_VLAN", + "GET_SRP_WITH_SN": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings/{}", + "GET_SNODES_FROM_DCNM": "/appcenter/Cisco/elasticservice/elasticservice-api/?attached-fabric={}", + "GET_SRP_INFO_FROM_DCNM": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings/{}/{}", + "GET_SRP_DEPLOY_STATUS": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings/{}/{}/attachments", + "CREATE_SRP": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings", + "UPDATE_SRP": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings/{}/{}", + "DELETE_SRP": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings/{}/{}", + "ATTACH_SRP": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings/{}/attachments", + "DETACH_SRP_SUFFIX": "/attachments?peering-names=", + "DEPLOY_SRP_PREFIX": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings/{}", + "DEPLOY_SRP_SUFFIX": "/deployments", + "SRP_FIXED_PREFIX": "/appcenter/Cisco/elasticservice/elasticservice-api/fabrics/{}/service-nodes/{}/peerings/{}", + "SRP_CFG_SAVE_AND_DEPLOY": "/rest/control/fabrics/{}/config-deploy", + }, 12: { - "ALLOC_VLAN": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/vlan/{}?vlanUsageType=SERVICE_NETWORK_VLAN", - "GET_SRP_WITH_SN": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings/{}", - "GET_SNODES_FROM_DCNM": "/appcenter/cisco/ndfc/api/v1/elastic-service/service-nodes?attached-fabric={}", - "GET_SRP_INFO_FROM_DCNM": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings/{}/{}", - "GET_SRP_DEPLOY_STATUS": 
"/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings/{}/{}/attachments", - "CREATE_SRP": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings", - "UPDATE_SRP": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings/{}/{}", - "DELETE_SRP": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings/{}/{}", - "ATTACH_SRP": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings/{}/attachments", - "DETACH_SRP_SUFFIX": "/attachments?peering-names=", - "DEPLOY_SRP_PREFIX": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings/{}", - "DEPLOY_SRP_SUFFIX": "/deployments", - "SRP_FIXED_PREFIX": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings/{}", - "SRP_CFG_SAVE_AND_DEPLOY": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/fabrics/{}/config-deploy" - } + "ALLOC_VLAN": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/vlan/{}?vlanUsageType=SERVICE_NETWORK_VLAN", + "GET_SRP_WITH_SN": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings/{}", + "GET_SNODES_FROM_DCNM": "/appcenter/cisco/ndfc/api/v1/elastic-service/service-nodes?attached-fabric={}", + "GET_SRP_INFO_FROM_DCNM": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings/{}/{}", + "GET_SRP_DEPLOY_STATUS": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings/{}/{}/attachments", + "CREATE_SRP": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings", + "UPDATE_SRP": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings/{}/{}", + "DELETE_SRP": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings/{}/{}", + "ATTACH_SRP": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings/{}/attachments", + 
"DETACH_SRP_SUFFIX": "/attachments?peering-names=", + "DEPLOY_SRP_PREFIX": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings/{}", + "DEPLOY_SRP_SUFFIX": "/deployments", + "SRP_FIXED_PREFIX": "/appcenter/cisco/ndfc/api/v1/elastic-service/fabrics/{}/service-nodes/{}/peerings/{}", + "SRP_CFG_SAVE_AND_DEPLOY": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/fabrics/{}/config-deploy", + }, } + def __init__(self, module): self.module = module self.params = module.params @@ -1290,14 +1292,14 @@ def dcnm_srp_validate_and_build_srp_info( srp_info, invalid_params = validate_list_of_dicts(cfg, srp_spec) if invalid_params: if cfg[0].get("name", " ") != " ": - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing Route Peering - " + cfg[0]["name"] + ", " + "\n".join(invalid_params) ) else: - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing Route Peering - Unknown, " + "\n".join(invalid_params) ) @@ -1311,11 +1313,9 @@ def dcnm_srp_validate_and_build_srp_info( in_list.append(item[net_name1]) # Validate inside and outside network dicts from route peering info - in_net, invalid_params = validate_list_of_dicts( - in_list, srp_network_spec - ) + in_net, invalid_params = validate_list_of_dicts(in_list, srp_network_spec) if invalid_params: - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing Network/Arm - " + net_name1 + ", under Route Peering - " @@ -1333,7 +1333,7 @@ def dcnm_srp_validate_and_build_srp_info( out_list, srp_network_spec ) if invalid_params: - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing Network/Arm - " + net_name2 + ", under Route Peering - " @@ -1351,7 +1351,7 @@ def dcnm_srp_validate_and_build_srp_info( in_list, srp_prof1_spec ) if 
invalid_params: - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing Profile under Network/Arm - " + net_name1 + ", under Route Peering - " @@ -1369,7 +1369,7 @@ def dcnm_srp_validate_and_build_srp_info( out_list, srp_prof2_spec ) if invalid_params: - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing Profile under Network/Arm - " + net_name2 + ", under Route Peering - " @@ -1393,7 +1393,7 @@ def dcnm_srp_validate_and_build_srp_info( in_list, srp_static_route_spec ) if invalid_params: - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing Static Route under Network/Arm - " + net_name1 + ", under Route Peering - " @@ -1415,7 +1415,7 @@ def dcnm_srp_validate_and_build_srp_info( out_list, srp_static_route_spec ) if invalid_params: - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing Static Route under Network/Arm - " + net_name2 + ", under Route Peering - " @@ -1449,7 +1449,7 @@ def dcnm_srp_translate_deploy_mode(self, item): } if item["deploy_mode"] not in trans_dict.keys(): - mesg = "Invalid 'deploy_mode' = {}, in playbook, Expected values = {}".format( + mesg = "Invalid 'deploy_mode' = {0}, in playbook, Expected values = {1}".format( item["deploy_mode"], trans_dict.keys() ) self.module.fail_json(msg=mesg) @@ -1498,7 +1498,7 @@ def dcnm_srp_validate_input(self): pass else: if "deploy_mode" not in item: - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing Route Peering - " + item["name"] + ", deploy_mode - Required parameter not found" @@ -1518,9 +1518,7 @@ def dcnm_srp_validate_input(self): if (item["deploy_mode"].lower() == "onearmadc") or ( item["deploy_mode"].lower() == "twoarmadc" ): - 
self.dcnm_srp_validate_adc_input( - cfg, item["deploy_mode"].lower() - ) + self.dcnm_srp_validate_adc_input(cfg, item["deploy_mode"].lower()) cfg.remove(citem) def dcnm_srp_validate_intra_tenant_firewall_input(self, cfg): @@ -1771,14 +1769,14 @@ def dcnm_srp_validate_delete_state_input(self, cfg): srp_info, invalid_params = validate_list_of_dicts(cfg, srp_delete_spec) if invalid_params: if cfg[0].get("name", " ") != " ": - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing Route Peering - " + cfg[0]["name"] + ", " + "".join(invalid_params) ) else: - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing Route Peering - Unknown, " + "".join(invalid_params) ) @@ -1807,7 +1805,7 @@ def dcnm_srp_validate_query_state_input(self, cfg): srp_info, invalid_params = validate_list_of_dicts(cfg, srp_query_spec) if invalid_params: - mesg = "Invalid parameters in playbook: {}".format( + mesg = "Invalid parameters in playbook: {0}".format( "while processing Route Peering - " + cfg[0]["name"] + ", " @@ -1869,9 +1867,7 @@ def dcnm_srp_get_payload_route_info(self, srp, srp_payload): if srp_payload["deploymentMode"] == "InterTenantFW": srp_payload["routes"].append(out_route_info) - srp_payload["routes"][1][ - "templateName" - ] = "service_static_route" + srp_payload["routes"][1]["templateName"] = "service_static_route" nv = srp_payload["routes"][1]["nvPairs"] @@ -1898,14 +1894,10 @@ def dcnm_srp_get_payload_route_info(self, srp, srp_payload): nv["NEIGHBOR_IP"] = srp[net_name1]["profile"]["ipv4_neighbor"] nv["LOOPBACK_IP"] = srp[net_name1]["profile"]["ipv4_lo"] - nv["PEER_LOOPBACK_IP"] = srp[net_name1]["profile"][ - "ipv4_vpc_peer_lo" - ] + nv["PEER_LOOPBACK_IP"] = srp[net_name1]["profile"]["ipv4_vpc_peer_lo"] nv["NEIGHBOR_IPV6"] = srp[net_name1]["profile"]["ipv6_neighbor"] nv["LOOPBACK_IPV6"] = srp[net_name1]["profile"]["ipv6_lo"] - 
nv["PEER_LOOPBACK_IPV6"] = srp[net_name1]["profile"][ - "ipv6_vpc_peer_lo" - ] + nv["PEER_LOOPBACK_IPV6"] = srp[net_name1]["profile"]["ipv6_vpc_peer_lo"] nv["ROUTE_MAP_TAG"] = srp[net_name1]["profile"]["route_map_tag"] nv["DESC"] = srp[net_name1]["profile"]["neigh_int_descr"] nv["LOCAL_ASN"] = srp[net_name1]["profile"]["local_asn"] @@ -1924,24 +1916,14 @@ def dcnm_srp_get_payload_route_info(self, srp, srp_payload): nv["NEIGHBOR_IP"] = srp[net_name2]["profile"]["ipv4_neighbor"] nv["LOOPBACK_IP"] = srp[net_name2]["profile"]["ipv4_lo"] - nv["PEER_LOOPBACK_IP"] = srp[net_name2]["profile"][ - "ipv4_vpc_peer_lo" - ] - nv["NEIGHBOR_IPV6"] = srp[net_name2]["profile"][ - "ipv6_neighbor" - ] + nv["PEER_LOOPBACK_IP"] = srp[net_name2]["profile"]["ipv4_vpc_peer_lo"] + nv["NEIGHBOR_IPV6"] = srp[net_name2]["profile"]["ipv6_neighbor"] nv["LOOPBACK_IPV6"] = srp[net_name2]["profile"]["ipv6_lo"] - nv["PEER_LOOPBACK_IPV6"] = srp[net_name2]["profile"][ - "ipv6_vpc_peer_lo" - ] - nv["ROUTE_MAP_TAG"] = srp[net_name2]["profile"][ - "route_map_tag" - ] + nv["PEER_LOOPBACK_IPV6"] = srp[net_name2]["profile"]["ipv6_vpc_peer_lo"] + nv["ROUTE_MAP_TAG"] = srp[net_name2]["profile"]["route_map_tag"] nv["DESC"] = srp[net_name2]["profile"]["neigh_int_descr"] nv["LOCAL_ASN"] = srp[net_name2]["profile"]["local_asn"] - nv["ADVERTISE_HOST_ROUTE"] = srp[net_name2]["profile"][ - "adv_host" - ] + nv["ADVERTISE_HOST_ROUTE"] = srp[net_name2]["profile"]["adv_host"] nv["ADMIN_STATE"] = True nv["VRF_NAME"] = srp[net_name2]["vrf"] @@ -1969,10 +1951,10 @@ def dcnm_srp_allocate_vlan_id(self, fabric, srp): if resp["RETURN_CODE"] == 200: break - else: - self.dcnm_srp_check_for_errors_in_resp(resp) - time.sleep(1) - continue + + self.dcnm_srp_check_for_errors_in_resp(resp) + time.sleep(1) + continue if resp["RETURN_CODE"] != 200: resp["CHANGED"] = self.changed_dict[0] @@ -2024,9 +2006,7 @@ def dcnm_srp_get_common_payload(self, srp, deploy_mode): srp_payload = {"serviceNetworks": [], "enabled": self.attach} - if 
(deploy_mode == "intratenantfw") or ( - deploy_mode == "intertenantfw" - ): + if (deploy_mode == "intratenantfw") or (deploy_mode == "intertenantfw"): net_name1 = "inside_network" net_name2 = "outside_network" networkType1 = "InsideNetworkFW" @@ -2049,66 +2029,58 @@ def dcnm_srp_get_common_payload(self, srp, deploy_mode): srp_payload["serviceNetworks"][0]["vrfName"] = srp[net_name1]["vrf"] srp_payload["serviceNetworks"][0]["networkType"] = networkType1 - srp_payload["serviceNetworks"][0]["networkName"] = srp[net_name1][ - "name" - ] + srp_payload["serviceNetworks"][0]["networkName"] = srp[net_name1]["name"] srp_payload["serviceNetworks"][0]["vlanId"] = srp[net_name1]["vlan_id"] # Inside Network Profile srp_payload["serviceNetworks"][0]["nvPairs"]["gatewayIpAddress"] = srp[ net_name1 ]["profile"]["ipv4_gw"] - srp_payload["serviceNetworks"][0]["nvPairs"][ - "gatewayIpV6Address" - ] = srp[net_name1]["profile"]["ipv6_gw"] - srp_payload["serviceNetworks"][0]["nvPairs"]["vlanName"] = srp[ + srp_payload["serviceNetworks"][0]["nvPairs"]["gatewayIpV6Address"] = srp[ net_name1 - ]["profile"]["vlan_name"] + ]["profile"]["ipv6_gw"] + srp_payload["serviceNetworks"][0]["nvPairs"]["vlanName"] = srp[net_name1][ + "profile" + ]["vlan_name"] srp_payload["serviceNetworks"][0]["nvPairs"]["intfDescription"] = srp[ net_name1 ]["profile"]["int_descr"] - srp_payload["serviceNetworks"][0]["nvPairs"]["tag"] = srp[net_name1][ - "profile" - ]["tag"] - srp_payload["serviceNetworks"][0]["nvPairs"]["vlanId"] = srp[ - net_name1 - ]["vlan_id"] + srp_payload["serviceNetworks"][0]["nvPairs"]["tag"] = srp[net_name1]["profile"][ + "tag" + ] + srp_payload["serviceNetworks"][0]["nvPairs"]["vlanId"] = srp[net_name1][ + "vlan_id" + ] if deploy_mode != "onearmadc": # Outside Network srp_payload["serviceNetworks"].append(out_network_defaults) - srp_payload["serviceNetworks"][1]["vrfName"] = srp[net_name2][ - "vrf" - ] + srp_payload["serviceNetworks"][1]["vrfName"] = srp[net_name2]["vrf"] 
srp_payload["serviceNetworks"][1]["networkType"] = networkType2 - srp_payload["serviceNetworks"][1]["networkName"] = srp[net_name2][ - "name" - ] - srp_payload["serviceNetworks"][1]["vlanId"] = srp[net_name2][ - "vlan_id" - ] + srp_payload["serviceNetworks"][1]["networkName"] = srp[net_name2]["name"] + srp_payload["serviceNetworks"][1]["vlanId"] = srp[net_name2]["vlan_id"] # Outside Network Profile - srp_payload["serviceNetworks"][1]["nvPairs"][ - "gatewayIpAddress" - ] = srp[net_name2]["profile"]["ipv4_gw"] - srp_payload["serviceNetworks"][1]["nvPairs"][ - "gatewayIpV6Address" - ] = srp[net_name2]["profile"]["ipv6_gw"] - srp_payload["serviceNetworks"][1]["nvPairs"]["vlanName"] = srp[ + srp_payload["serviceNetworks"][1]["nvPairs"]["gatewayIpAddress"] = srp[ net_name2 - ]["profile"]["vlan_name"] - srp_payload["serviceNetworks"][1]["nvPairs"][ - "intfDescription" - ] = srp[net_name2]["profile"]["int_descr"] - srp_payload["serviceNetworks"][1]["nvPairs"]["tag"] = srp[ + ]["profile"]["ipv4_gw"] + srp_payload["serviceNetworks"][1]["nvPairs"]["gatewayIpV6Address"] = srp[ net_name2 - ]["profile"]["tag"] - srp_payload["serviceNetworks"][1]["nvPairs"]["vlanId"] = srp[ + ]["profile"]["ipv6_gw"] + srp_payload["serviceNetworks"][1]["nvPairs"]["vlanName"] = srp[net_name2][ + "profile" + ]["vlan_name"] + srp_payload["serviceNetworks"][1]["nvPairs"]["intfDescription"] = srp[ net_name2 - ]["vlan_id"] + ]["profile"]["int_descr"] + srp_payload["serviceNetworks"][1]["nvPairs"]["tag"] = srp[net_name2][ + "profile" + ]["tag"] + srp_payload["serviceNetworks"][1]["nvPairs"]["vlanId"] = srp[net_name2][ + "vlan_id" + ] # Service Node and Fabric details srp_payload["serviceNodeName"] = srp["node_name"] @@ -2277,46 +2249,28 @@ def dcnm_srp_update_route_info(self, want, have, cfg): wnv = want["routes"][1]["nvPairs"] hnv = have["routes"][1]["nvPairs"] - if ( - cfg[net_name2]["profile"].get("ipv4_neighbor", None) - is None - ): + if cfg[net_name2]["profile"].get("ipv4_neighbor", None) is None: 
wnv["NEIGHBOR_IP"] = hnv["NEIGHBOR_IP"] if cfg[net_name2]["profile"].get("ipv4_lo", None) is None: wnv["LOOPBACK_IP"] = hnv["LOOPBACK_IP"] - if ( - cfg[net_name2]["profile"].get("ipv4_vpc_peer_lo", None) - is None - ): + if cfg[net_name2]["profile"].get("ipv4_vpc_peer_lo", None) is None: wnv["PEER_LOOPBACK_IP"] = hnv["PEER_LOOPBACK_IP"] - if ( - cfg[net_name2]["profile"].get("ipv6_neighbor", None) - is None - ): + if cfg[net_name2]["profile"].get("ipv6_neighbor", None) is None: wnv["NEIGHBOR_IPV6"] = hnv["NEIGHBOR_IPV6"] if cfg[net_name2]["profile"].get("ipv6_lo", None) is None: wnv["LOOPBACK_IPV6"] = hnv["LOOPBACK_IPV6"] - if ( - cfg[net_name2]["profile"].get("ipv6_vpc_peer_lo", None) - is None - ): + if cfg[net_name2]["profile"].get("ipv6_vpc_peer_lo", None) is None: wnv["PEER_LOOPBACK_IPV6"] = hnv["PEER_LOOPBACK_IPV6"] - if ( - cfg[net_name2]["profile"].get("route_map_tag", None) - is None - ): + if cfg[net_name2]["profile"].get("route_map_tag", None) is None: wnv["ROUTE_MAP_TAG"] = hnv["ROUTE_MAP_TAG"] - if ( - cfg[net_name2]["profile"].get("neigh_int_descr", None) - is None - ): + if cfg[net_name2]["profile"].get("neigh_int_descr", None) is None: wnv["DESC"] = hnv["DESC"] if cfg[net_name2]["profile"].get("loacl_asn", None) is None: @@ -2356,7 +2310,6 @@ def dcnm_srp_update_common_info(self, want, have, cfg): net_name1 = "first_arm" net_name2 = "second_arm" - if want["deploymentMode"].lower() != "intertenantfw": if cfg.get("reverse_next_hop", None) is None: want["reverseNextHopIp"] = have.get("reverseNextHopIp") @@ -2369,14 +2322,12 @@ def dcnm_srp_update_common_info(self, want, have, cfg): ] if cfg[net_name1].get("name", None) is None: - want["serviceNetworks"][0]["networkName"] = have[ - "serviceNetworks" - ][0]["networkName"] + want["serviceNetworks"][0]["networkName"] = have["serviceNetworks"][0][ + "networkName" + ] if cfg[net_name1].get("vlan_id", None) is None: - want["serviceNetworks"][0]["vlanId"] = have["serviceNetworks"][0][ - "vlanId" - ] + 
want["serviceNetworks"][0]["vlanId"] = have["serviceNetworks"][0]["vlanId"] # Inside Network Profile if cfg[net_name1]["profile"].get("ipv4_gw", None) is None: @@ -2390,56 +2341,56 @@ def dcnm_srp_update_common_info(self, want, have, cfg): ][0]["nvPairs"]["gatewayIpV6Address"] if cfg[net_name1]["profile"].get("vlan_name", None) is None: - want["serviceNetworks"][0]["nvPairs"]["vlanName"] = have[ - "serviceNetworks" - ][0]["nvPairs"]["vlanName"] + want["serviceNetworks"][0]["nvPairs"]["vlanName"] = have["serviceNetworks"][ + 0 + ]["nvPairs"]["vlanName"] if cfg[net_name1]["profile"].get("int_descr", None) is None: - hif_desc = have["serviceNetworks"][0]["nvPairs"][ - "intfDescription" - ].split(" ")[:-1] - want["serviceNetworks"][0]["nvPairs"][ - "intfDescription" - ] = " ".join(hif_desc) + hif_desc = have["serviceNetworks"][0]["nvPairs"]["intfDescription"].split( + " " + )[:-1] + want["serviceNetworks"][0]["nvPairs"]["intfDescription"] = " ".join( + hif_desc + ) if cfg[net_name1]["profile"].get("tag", None) is None: - want["serviceNetworks"][0]["nvPairs"]["tag"] = have[ - "serviceNetworks" - ][0]["nvPairs"]["tag"] + want["serviceNetworks"][0]["nvPairs"]["tag"] = have["serviceNetworks"][0][ + "nvPairs" + ]["tag"] if cfg[net_name1].get("vlan_id", None) is None: - want["serviceNetworks"][0]["nvPairs"]["vlanId"] = have[ - "serviceNetworks" - ][0]["nvPairs"]["vlanId"] + want["serviceNetworks"][0]["nvPairs"]["vlanId"] = have["serviceNetworks"][ + 0 + ]["nvPairs"]["vlanId"] if want["deploymentMode"].lower() != "onearmadc": # Outside Network if cfg[net_name2].get("vrf", None) is None: - want["serviceNetworks"][1]["vrfName"] = have[ - "serviceNetworks" - ][1]["vrfName"] + want["serviceNetworks"][1]["vrfName"] = have["serviceNetworks"][1][ + "vrfName" + ] if cfg[net_name2].get("name", None) is None: - want["serviceNetworks"][1]["networkName"] = have[ - "serviceNetworks" - ][1]["networkName"] + want["serviceNetworks"][1]["networkName"] = have["serviceNetworks"][1][ + 
"networkName" + ] if cfg[net_name2].get("vlan_id", None) is None: - want["serviceNetworks"][1]["vlanId"] = have["serviceNetworks"][ - 1 - ]["vlanId"] + want["serviceNetworks"][1]["vlanId"] = have["serviceNetworks"][1][ + "vlanId" + ] # Outside Network Profile if cfg[net_name2]["profile"].get("ipv4_gw", None) is None: - want["serviceNetworks"][1]["nvPairs"][ - "gatewayIpAddress" - ] = have["serviceNetworks"][1]["nvPairs"]["gatewayIpAddress"] + want["serviceNetworks"][1]["nvPairs"]["gatewayIpAddress"] = have[ + "serviceNetworks" + ][1]["nvPairs"]["gatewayIpAddress"] if cfg[net_name2]["profile"].get("ipv6_gw", None) is None: - want["serviceNetworks"][1]["nvPairs"][ - "gatewayIpV6Address" - ] = have["serviceNetworks"][1]["nvPairs"]["gatewayIpV6Address"] + want["serviceNetworks"][1]["nvPairs"]["gatewayIpV6Address"] = have[ + "serviceNetworks" + ][1]["nvPairs"]["gatewayIpV6Address"] if cfg[net_name2]["profile"].get("vlan_name", None) is None: want["serviceNetworks"][1]["nvPairs"]["vlanName"] = have[ @@ -2450,14 +2401,14 @@ def dcnm_srp_update_common_info(self, want, have, cfg): hif_desc = have["serviceNetworks"][1]["nvPairs"][ "intfDescription" ].split(" ")[:-1] - want["serviceNetworks"][1]["nvPairs"][ - "intfDescription" - ] = " ".join(hif_desc) + want["serviceNetworks"][1]["nvPairs"]["intfDescription"] = " ".join( + hif_desc + ) if cfg[net_name2]["profile"].get("tag", None) is None: - want["serviceNetworks"][1]["nvPairs"]["tag"] = have[ - "serviceNetworks" - ][1]["nvPairs"]["tag"] + want["serviceNetworks"][1]["nvPairs"]["tag"] = have["serviceNetworks"][ + 1 + ]["nvPairs"]["tag"] if cfg[net_name2].get("vlan_id", None) is None: want["serviceNetworks"][1]["nvPairs"]["vlanId"] = have[ @@ -2502,9 +2453,7 @@ def dcnm_srp_update_want(self): (srp["peeringName"] == have["peeringName"]) and (srp["fabricName"] == have["fabricName"]) and (srp["serviceNodeName"] == have["serviceNodeName"]) - and ( - srp["attachedFabricName"] == have["attachedFabricName"] - ) + and 
(srp["attachedFabricName"] == have["attachedFabricName"]) ) ] if match_have == []: @@ -2516,15 +2465,9 @@ def dcnm_srp_update_want(self): for cfg in self.config if ( (srp["peeringName"] == cfg["name"]) - and ( - srp["fabricName"] - == self.module.params["service_fabric"] - ) + and (srp["fabricName"] == self.module.params["service_fabric"]) and (srp["serviceNodeName"] == cfg["node_name"]) - and ( - srp["attachedFabricName"] - == self.module.params["fabric"] - ) + and (srp["attachedFabricName"] == self.module.params["fabric"]) ) ] if match_cfg == []: @@ -2573,7 +2516,11 @@ def dcnm_srp_get_srp_info_with_service_node(self, node_name): resp["DATA"] (dict): All route peerings present on the specified service node """ - path = self.paths["GET_SRP_WITH_SN"].format(self.module.params["service_fabric"], node_name, self.module.params["fabric"]) + path = self.paths["GET_SRP_WITH_SN"].format( + self.module.params["service_fabric"], + node_name, + self.module.params["fabric"], + ) retries = 0 while retries < 30: retries += 1 @@ -2583,8 +2530,8 @@ def dcnm_srp_get_srp_info_with_service_node(self, node_name): self.dcnm_srp_check_for_errors_in_resp(resp) time.sleep(10) continue - else: - break + + break if resp and (resp["RETURN_CODE"] == 200) and resp["DATA"]: resp["RETRIES"] = retries @@ -2615,8 +2562,8 @@ def dcnm_srp_get_service_nodes_from_dcnm(self): self.dcnm_srp_check_for_errors_in_resp(resp) time.sleep(10) continue - else: - break + + break if resp and (resp["RETURN_CODE"] == 200) and resp["DATA"]: resp["RETRIES"] = retries @@ -2640,9 +2587,19 @@ def dcnm_srp_get_srp_info_from_dcnm(self, srp, srp_type): """ if srp_type == "PAYLOAD": - path = self.paths["GET_SRP_INFO_FROM_DCNM"].format(srp["fabricName"], srp["serviceNodeName"], srp["attachedFabricName"], srp["peeringName"]) + path = self.paths["GET_SRP_INFO_FROM_DCNM"].format( + srp["fabricName"], + srp["serviceNodeName"], + srp["attachedFabricName"], + srp["peeringName"], + ) else: - path = 
self.paths["GET_SRP_INFO_FROM_DCNM"].format(self.module.params["service_fabric"], srp["node_name"], self.module.params["fabric"], srp["name"]) + path = self.paths["GET_SRP_INFO_FROM_DCNM"].format( + self.module.params["service_fabric"], + srp["node_name"], + self.module.params["fabric"], + srp["name"], + ) resource_not_found = False retries = 0 @@ -2666,8 +2623,8 @@ def dcnm_srp_get_srp_info_from_dcnm(self, srp, srp_type): self.dcnm_srp_check_for_errors_in_resp(resp) time.sleep(10) continue - else: - break + + break if resp and (resp["RETURN_CODE"] == 200) and resp["DATA"]: resp["RETRIES"] = retries @@ -2719,22 +2676,42 @@ def dcnm_srp_compare_common_info(self, want, have): # Global if want.get("deploymentMode") != have.get("deploymentMode"): mismatch_reasons.append( - {"DCNM_SRP_DM_NO_MATCH": [want.get("deploymentMode"), have.get("deploymentMode")]} + { + "DCNM_SRP_DM_NO_MATCH": [ + want.get("deploymentMode"), + have.get("deploymentMode"), + ] + } ) if want["serviceNodeType"] != have["serviceNodeType"]: mismatch_reasons.append( - {"DCNM_SRP_SNT_NO_MATCH": [want["serviceNodeType"], have["serviceNodeType"]]} + { + "DCNM_SRP_SNT_NO_MATCH": [ + want["serviceNodeType"], + have["serviceNodeType"], + ] + } ) - if want.get("nextHopIp", '') != have.get("nextHopIp",''): + if want.get("nextHopIp", "") != have.get("nextHopIp", ""): mismatch_reasons.append( - {"DCNM_SRP_NHIP_NO_MATCH": [want.get("nextHopIp", ''), have.get("nextHopIp", '')]} + { + "DCNM_SRP_NHIP_NO_MATCH": [ + want.get("nextHopIp", ""), + have.get("nextHopIp", ""), + ] + } ) - if want.get("reverseNextHopIp", '') != have.get("reverseNextHopIp", ''): + if want.get("reverseNextHopIp", "") != have.get("reverseNextHopIp", ""): mismatch_reasons.append( - {"DCNM_SRP_REV_NHIP_NO_MATCH": [want.get("reverseNextHopIp", ''), have.get("reverseNextHopIp", '')]} + { + "DCNM_SRP_REV_NHIP_NO_MATCH": [ + want.get("reverseNextHopIp", ""), + have.get("reverseNextHopIp", ""), + ] + } ) # Inside Network @@ -2743,7 +2720,12 @@ def 
dcnm_srp_compare_common_info(self, want, have): != have["serviceNetworks"][0]["vrfName"] ): mismatch_reasons.append( - {"DCNM_SRP_IN_VRF_NO_MATCH": [want["serviceNetworks"][0]["vrfName"], have["serviceNetworks"][0]["vrfName"]]} + { + "DCNM_SRP_IN_VRF_NO_MATCH": [ + want["serviceNetworks"][0]["vrfName"], + have["serviceNetworks"][0]["vrfName"], + ] + } ) if ( @@ -2751,7 +2733,12 @@ def dcnm_srp_compare_common_info(self, want, have): != have["serviceNetworks"][0]["networkType"] ): mismatch_reasons.append( - {"DCNM_SRP_IN_NT_NO_MATCH": [want["serviceNetworks"][0]["networkType"], have["serviceNetworks"][0]["networkType"]]} + { + "DCNM_SRP_IN_NT_NO_MATCH": [ + want["serviceNetworks"][0]["networkType"], + have["serviceNetworks"][0]["networkType"], + ] + } ) if ( @@ -2759,7 +2746,12 @@ def dcnm_srp_compare_common_info(self, want, have): != have["serviceNetworks"][0]["networkName"] ): mismatch_reasons.append( - {"DCNM_SRP_IN_NN_NO_MATCH": [want["serviceNetworks"][0]["networkName"], have["serviceNetworks"][0]["networkName"]]} + { + "DCNM_SRP_IN_NN_NO_MATCH": [ + want["serviceNetworks"][0]["networkName"], + have["serviceNetworks"][0]["networkName"], + ] + } ) if ( @@ -2767,7 +2759,12 @@ def dcnm_srp_compare_common_info(self, want, have): != have["serviceNetworks"][0]["vlanId"] ): mismatch_reasons.append( - {"DCNM_SRP_IN_VID_NO_MATCH": [want["serviceNetworks"][0].get("vlanId"), have["serviceNetworks"][0]["vlanId"]]} + { + "DCNM_SRP_IN_VID_NO_MATCH": [ + want["serviceNetworks"][0].get("vlanId"), + have["serviceNetworks"][0]["vlanId"], + ] + } ) # Inside Network Profile @@ -2776,7 +2773,12 @@ def dcnm_srp_compare_common_info(self, want, have): != have["serviceNetworks"][0]["nvPairs"]["gatewayIpAddress"] ): mismatch_reasons.append( - {"DCNM_SRP_IN_IPV4GW_NO_MATCH": [want["serviceNetworks"][0]["nvPairs"]["gatewayIpAddress"], have["serviceNetworks"][0]["nvPairs"]["gatewayIpAddress"]]} + { + "DCNM_SRP_IN_IPV4GW_NO_MATCH": [ + 
want["serviceNetworks"][0]["nvPairs"]["gatewayIpAddress"], + have["serviceNetworks"][0]["nvPairs"]["gatewayIpAddress"], + ] + } ) if ( @@ -2784,27 +2786,37 @@ def dcnm_srp_compare_common_info(self, want, have): != have["serviceNetworks"][0]["nvPairs"]["gatewayIpV6Address"] ): mismatch_reasons.append( - {"DCNM_SRP_IN_IPV6GW_NO_MATCH": [want["serviceNetworks"][0]["nvPairs"]["gatewayIpV6Address"], have["serviceNetworks"][0]["nvPairs"]["gatewayIpV6Address"]]} + { + "DCNM_SRP_IN_IPV6GW_NO_MATCH": [ + want["serviceNetworks"][0]["nvPairs"]["gatewayIpV6Address"], + have["serviceNetworks"][0]["nvPairs"]["gatewayIpV6Address"], + ] + } ) if ( want["serviceNetworks"][0]["nvPairs"]["vlanName"] != have["serviceNetworks"][0]["nvPairs"]["vlanName"] ): mismatch_reasons.append( - {"DCNM_SRP_IN_VNAME_NO_MATCH": [want["serviceNetworks"][0]["nvPairs"]["vlanName"], have["serviceNetworks"][0]["nvPairs"]["vlanName"]]} + { + "DCNM_SRP_IN_VNAME_NO_MATCH": [ + want["serviceNetworks"][0]["nvPairs"]["vlanName"], + have["serviceNetworks"][0]["nvPairs"]["vlanName"], + ] + } ) # When we get the SRP inmformation from have, the intfDescription would have been modified and some meta data added. 
so ignore the meta data # when comparing the interface descriptions if want["serviceNetworks"][0]["nvPairs"]["intfDescription"] != "": - wif_desc = want["serviceNetworks"][0]["nvPairs"][ - "intfDescription" - ].split(" ") + wif_desc = want["serviceNetworks"][0]["nvPairs"]["intfDescription"].split( + " " + ) else: wif_desc = [] - hif_desc = have["serviceNetworks"][0]["nvPairs"][ - "intfDescription" - ].split(" ")[:-1] + hif_desc = have["serviceNetworks"][0]["nvPairs"]["intfDescription"].split(" ")[ + :-1 + ] if wif_desc != hif_desc: mismatch_reasons.append( {"DCNM_SRP_IN_DESCR_NO_MATCH": [" ".join(wif_desc), " ".join(hif_desc)]} @@ -2814,14 +2826,24 @@ def dcnm_srp_compare_common_info(self, want, have): != have["serviceNetworks"][0]["nvPairs"]["tag"] ): mismatch_reasons.append( - {"DCNM_SRP_IN_TAG_NO_MATCH": [str(want["serviceNetworks"][0]["nvPairs"]["tag"]), have["serviceNetworks"][0]["nvPairs"]["tag"]]} + { + "DCNM_SRP_IN_TAG_NO_MATCH": [ + str(want["serviceNetworks"][0]["nvPairs"]["tag"]), + have["serviceNetworks"][0]["nvPairs"]["tag"], + ] + } ) if ( str(want["serviceNetworks"][0]["nvPairs"]["vlanId"]) != have["serviceNetworks"][0]["nvPairs"]["vlanId"] ): mismatch_reasons.append( - {"DCNM_SRP_IN_PROF_VID_NO_MATCH": [str(want["serviceNetworks"][0]["nvPairs"]["vlanId"]), have["serviceNetworks"][0]["nvPairs"]["vlanId"]]} + { + "DCNM_SRP_IN_PROF_VID_NO_MATCH": [ + str(want["serviceNetworks"][0]["nvPairs"]["vlanId"]), + have["serviceNetworks"][0]["nvPairs"]["vlanId"], + ] + } ) if want["deploymentMode"].lower() != "onearmadc": @@ -2832,28 +2854,48 @@ def dcnm_srp_compare_common_info(self, want, have): != have["serviceNetworks"][1]["vrfName"] ): mismatch_reasons.append( - {"DCNM_SRP_OUT_VRF_NO_MATCH": [want["serviceNetworks"][1]["vrfName"], have["serviceNetworks"][1]["vrfName"]]} + { + "DCNM_SRP_OUT_VRF_NO_MATCH": [ + want["serviceNetworks"][1]["vrfName"], + have["serviceNetworks"][1]["vrfName"], + ] + } ) if ( want["serviceNetworks"][1]["networkType"] != 
have["serviceNetworks"][1]["networkType"] ): mismatch_reasons.append( - {"DCNM_SRP_OUT_NT_NO_MATCH": [want["serviceNetworks"][1]["networkType"], have["serviceNetworks"][1]["networkType"]]} + { + "DCNM_SRP_OUT_NT_NO_MATCH": [ + want["serviceNetworks"][1]["networkType"], + have["serviceNetworks"][1]["networkType"], + ] + } ) if ( want["serviceNetworks"][1]["networkName"] != have["serviceNetworks"][1]["networkName"] ): mismatch_reasons.append( - {"DCNM_SRP_OUT_NN_NO_MATCH": [want["serviceNetworks"][1]["networkName"], have["serviceNetworks"][1]["networkName"]]} + { + "DCNM_SRP_OUT_NN_NO_MATCH": [ + want["serviceNetworks"][1]["networkName"], + have["serviceNetworks"][1]["networkName"], + ] + } ) if ( want["serviceNetworks"][1]["vlanId"] != have["serviceNetworks"][1]["vlanId"] ): mismatch_reasons.append( - {"DCNM_SRP_OUT_VID_NO_MATCH": [want["serviceNetworks"][1]["vlanId"], have["serviceNetworks"][1]["vlanId"]]} + { + "DCNM_SRP_OUT_VID_NO_MATCH": [ + want["serviceNetworks"][1]["vlanId"], + have["serviceNetworks"][1]["vlanId"], + ] + } ) # Outside Network Profile @@ -2862,21 +2904,36 @@ def dcnm_srp_compare_common_info(self, want, have): != have["serviceNetworks"][1]["nvPairs"]["gatewayIpAddress"] ): mismatch_reasons.append( - {"DCNM_SRP_OUT_IPV4GW_NO_MATCH": [want["serviceNetworks"][1]["nvPairs"]["gatewayIpAddress"], have["serviceNetworks"][1]["nvPairs"]["gatewayIpAddress"]]} + { + "DCNM_SRP_OUT_IPV4GW_NO_MATCH": [ + want["serviceNetworks"][1]["nvPairs"]["gatewayIpAddress"], + have["serviceNetworks"][1]["nvPairs"]["gatewayIpAddress"], + ] + } ) if ( want["serviceNetworks"][1]["nvPairs"]["gatewayIpV6Address"] != have["serviceNetworks"][1]["nvPairs"]["gatewayIpV6Address"] ): mismatch_reasons.append( - {"DCNM_SRP_OUT_IPV6GW_NO_MATCH": [want["serviceNetworks"][1]["nvPairs"]["gatewayIpV6Address"], have["serviceNetworks"][1]["nvPairs"]["gatewayIpV6Address"]]} + { + "DCNM_SRP_OUT_IPV6GW_NO_MATCH": [ + want["serviceNetworks"][1]["nvPairs"]["gatewayIpV6Address"], + 
have["serviceNetworks"][1]["nvPairs"]["gatewayIpV6Address"], + ] + } ) if ( want["serviceNetworks"][1]["nvPairs"]["vlanName"] != have["serviceNetworks"][1]["nvPairs"]["vlanName"] ): mismatch_reasons.append( - {"DCNM_SRP_OUT_VNAME_NO_MATCH": [want["serviceNetworks"][1]["nvPairs"]["vlanName"], have["serviceNetworks"][1]["nvPairs"]["vlanName"]]} + { + "DCNM_SRP_OUT_VNAME_NO_MATCH": [ + want["serviceNetworks"][1]["nvPairs"]["vlanName"], + have["serviceNetworks"][1]["nvPairs"]["vlanName"], + ] + } ) # When we get the SRP inmformation from have, the intfDescription would have been modified and some meta data added. so ignore the meta data @@ -2887,33 +2944,51 @@ def dcnm_srp_compare_common_info(self, want, have): ].split(" ") else: wif_desc = [] - hif_desc = have["serviceNetworks"][1]["nvPairs"][ - "intfDescription" - ].split(" ")[:-1] + hif_desc = have["serviceNetworks"][1]["nvPairs"]["intfDescription"].split( + " " + )[:-1] if wif_desc != hif_desc: mismatch_reasons.append( - {"DCNM_SRP_OUT_DESCR_NO_MATCH": [" ".join(wif_desc), " ".join(hif_desc)]} + { + "DCNM_SRP_OUT_DESCR_NO_MATCH": [ + " ".join(wif_desc), + " ".join(hif_desc), + ] + } ) if ( str(want["serviceNetworks"][1]["nvPairs"]["tag"]) != have["serviceNetworks"][1]["nvPairs"]["tag"] ): mismatch_reasons.append( - {"DCNM_SRP_OUT_TAG_NO_MATCH": [str(want["serviceNetworks"][1]["nvPairs"]["tag"]), - have["serviceNetworks"][1]["nvPairs"]["tag"]]} + { + "DCNM_SRP_OUT_TAG_NO_MATCH": [ + str(want["serviceNetworks"][1]["nvPairs"]["tag"]), + have["serviceNetworks"][1]["nvPairs"]["tag"], + ] + } ) if ( str(want["serviceNetworks"][1]["nvPairs"]["vlanId"]) != have["serviceNetworks"][1]["nvPairs"]["vlanId"] ): mismatch_reasons.append( - {"DCNM_SRP_OUT_PROF_VID_NO_MATCH": [str(want["serviceNetworks"][1]["nvPairs"]["vlanId"]), - have["serviceNetworks"][1]["nvPairs"]["vlanId"]]} + { + "DCNM_SRP_OUT_PROF_VID_NO_MATCH": [ + str(want["serviceNetworks"][1]["nvPairs"]["vlanId"]), + have["serviceNetworks"][1]["nvPairs"]["vlanId"], + ] 
+ } ) if str(want["enabled"]).lower() != str(have["enabled"]).lower(): mismatch_reasons.append( - {"DCNM_SRP_ATT_NO_MATCH": [str(want["enabled"]).lower(), str(have["enabled"]).lower()]} + { + "DCNM_SRP_ATT_NO_MATCH": [ + str(want["enabled"]).lower(), + str(have["enabled"]).lower(), + ] + } ) if mismatch_reasons == []: @@ -2973,17 +3048,24 @@ def dcnm_srp_compare_route_info(self, want, have): if want["peeringOption"] == "StaticPeering": - if ( - want["routes"][0]["templateName"] - != have["routes"][0]["templateName"] - ): + if want["routes"][0]["templateName"] != have["routes"][0]["templateName"]: mismatch_reasons.append( - {"DCNM_SRP_SP_IN_TN_NO_MATCH": [want["routes"][0]["templateName"], have["routes"][0]["templateName"]]} + { + "DCNM_SRP_SP_IN_TN_NO_MATCH": [ + want["routes"][0]["templateName"], + have["routes"][0]["templateName"], + ] + } ) if want["routes"][0]["vrfName"] != have["routes"][0]["vrfName"]: mismatch_reasons.append( - {"DCNM_SRP_SP_IN_VRF_NO_MATCH": [want["routes"][0]["vrfName"], have["routes"][0]["vrfName"]]} + { + "DCNM_SRP_SP_IN_VRF_NO_MATCH": [ + want["routes"][0]["vrfName"], + have["routes"][0]["vrfName"], + ] + } ) wnv = want["routes"][0]["nvPairs"] @@ -2991,7 +3073,12 @@ def dcnm_srp_compare_route_info(self, want, have): if wnv["VRF_NAME"] != hnv["VRF_NAME"]: mismatch_reasons.append( - {"DCNM_SRP_SP_IN_PROF_VRF_NO_MATCH": [wnv["VRF_NAME"], hnv["VRF_NAME"]]} + { + "DCNM_SRP_SP_IN_PROF_VRF_NO_MATCH": [ + wnv["VRF_NAME"], + hnv["VRF_NAME"], + ] + } ) rc = self.dcnm_srp_compare_multi_routes( @@ -3000,7 +3087,12 @@ def dcnm_srp_compare_route_info(self, want, have): if rc == "DCNM_MR_NO_MATCH": mismatch_reasons.append( - {"DCNM_SRP_SP_IN_MR_NO_MATCH": [wnv["MULTI_ROUTES"], hnv["MULTI_ROUTES"]]} + { + "DCNM_SRP_SP_IN_MR_NO_MATCH": [ + wnv["MULTI_ROUTES"], + hnv["MULTI_ROUTES"], + ] + } ) if want["deploymentMode"] == "InterTenantFW": @@ -3010,15 +3102,22 @@ def dcnm_srp_compare_route_info(self, want, have): != have["routes"][1]["templateName"] ): 
mismatch_reasons.append( - {"DCNM_SRP_SP_OUT_TN_NO_MATCH": [want["routes"][1]["templateName"], have["routes"][1]["templateName"]]} + { + "DCNM_SRP_SP_OUT_TN_NO_MATCH": [ + want["routes"][1]["templateName"], + have["routes"][1]["templateName"], + ] + } ) - if ( - want["routes"][1]["vrfName"] - != have["routes"][1]["vrfName"] - ): + if want["routes"][1]["vrfName"] != have["routes"][1]["vrfName"]: mismatch_reasons.append( - {"DCNM_SRP_SP_OUT_VRF_NO_MATCH": [want["routes"][1]["vrfName"], have["routes"][1]["vrfName"]]} + { + "DCNM_SRP_SP_OUT_VRF_NO_MATCH": [ + want["routes"][1]["vrfName"], + have["routes"][1]["vrfName"], + ] + } ) wnv = want["routes"][1]["nvPairs"] @@ -3026,7 +3125,12 @@ def dcnm_srp_compare_route_info(self, want, have): if wnv["VRF_NAME"] != hnv["VRF_NAME"]: mismatch_reasons.append( - {"DCNM_SRP_SP_OUT_PROF_VRF_NO_MATCH": [wnv["VRF_NAME"], hnv["VRF_NAME"]]} + { + "DCNM_SRP_SP_OUT_PROF_VRF_NO_MATCH": [ + wnv["VRF_NAME"], + hnv["VRF_NAME"], + ] + } ) rc = self.dcnm_srp_compare_multi_routes( @@ -3035,17 +3139,24 @@ def dcnm_srp_compare_route_info(self, want, have): if rc == "DCNM_MR_NO_MATCH": mismatch_reasons.append( - {"DCNM_SRP_SP_OUT_MR_NO_MATCH": [wnv["MULTI_ROUTES"], hnv["MULTI_ROUTES"]]} + { + "DCNM_SRP_SP_OUT_MR_NO_MATCH": [ + wnv["MULTI_ROUTES"], + hnv["MULTI_ROUTES"], + ] + } ) elif want["peeringOption"] == "EBGPDynamicPeering": - if ( - want["routes"][0]["templateName"] - != have["routes"][0]["templateName"] - ): + if want["routes"][0]["templateName"] != have["routes"][0]["templateName"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_IN_TN_NO_MATCH": [want["routes"][0]["templateName"], have["routes"][0]["templateName"]]} + { + "DCNM_SRP_EBGP_IN_TN_NO_MATCH": [ + want["routes"][0]["templateName"], + have["routes"][0]["templateName"], + ] + } ) wnv = want["routes"][0]["nvPairs"] @@ -3053,59 +3164,116 @@ def dcnm_srp_compare_route_info(self, want, have): if wnv["NEIGHBOR_IP"] != hnv["NEIGHBOR_IP"]: mismatch_reasons.append( - 
{"DCNM_SRP_EBGP_IN_NIP4_NO_MATCH": [wnv["NEIGHBOR_IP"], hnv["NEIGHBOR_IP"]]} + { + "DCNM_SRP_EBGP_IN_NIP4_NO_MATCH": [ + wnv["NEIGHBOR_IP"], + hnv["NEIGHBOR_IP"], + ] + } ) if wnv["LOOPBACK_IP"] != hnv["LOOPBACK_IP"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_IN_LIP4_NO_MATCH": [wnv["LOOPBACK_IP"], hnv["LOOPBACK_IP"]]} + { + "DCNM_SRP_EBGP_IN_LIP4_NO_MATCH": [ + wnv["LOOPBACK_IP"], + hnv["LOOPBACK_IP"], + ] + } ) if wnv["PEER_LOOPBACK_IP"] != hnv["PEER_LOOPBACK_IP"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_IN_PLIP4_NO_MATCH": [wnv["PEER_LOOPBACK_IP"], hnv["PEER_LOOPBACK_IP"]]} + { + "DCNM_SRP_EBGP_IN_PLIP4_NO_MATCH": [ + wnv["PEER_LOOPBACK_IP"], + hnv["PEER_LOOPBACK_IP"], + ] + } ) if wnv["NEIGHBOR_IPV6"] != hnv["NEIGHBOR_IPV6"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_IN_NIP6_NO_MATCH": [wnv["NEIGHBOR_IPV6"], hnv["NEIGHBOR_IPV6"]]} + { + "DCNM_SRP_EBGP_IN_NIP6_NO_MATCH": [ + wnv["NEIGHBOR_IPV6"], + hnv["NEIGHBOR_IPV6"], + ] + } ) if wnv["LOOPBACK_IPV6"] != hnv["LOOPBACK_IPV6"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_IN_LIP6_NO_MATCH": [wnv["LOOPBACK_IPV6"], hnv["LOOPBACK_IPV6"]]} + { + "DCNM_SRP_EBGP_IN_LIP6_NO_MATCH": [ + wnv["LOOPBACK_IPV6"], + hnv["LOOPBACK_IPV6"], + ] + } ) if wnv["PEER_LOOPBACK_IPV6"] != hnv["PEER_LOOPBACK_IPV6"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_IN_PLIP6_NO_MATCH": [wnv["PEER_LOOPBACK_IPV6"], hnv["PEER_LOOPBACK_IPV6"]]} + { + "DCNM_SRP_EBGP_IN_PLIP6_NO_MATCH": [ + wnv["PEER_LOOPBACK_IPV6"], + hnv["PEER_LOOPBACK_IPV6"], + ] + } ) if str(wnv["ROUTE_MAP_TAG"]) != hnv["ROUTE_MAP_TAG"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_IN_RMT_NO_MATCH": [str(wnv["ROUTE_MAP_TAG"]), hnv["ROUTE_MAP_TAG"]]} + { + "DCNM_SRP_EBGP_IN_RMT_NO_MATCH": [ + str(wnv["ROUTE_MAP_TAG"]), + hnv["ROUTE_MAP_TAG"], + ] + } ) if wnv["DESC"] != hnv["DESC"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_IN_DESCR_NO_MATCH": [ wnv["DESC"], hnv["DESC"]]} + {"DCNM_SRP_EBGP_IN_DESCR_NO_MATCH": [wnv["DESC"], hnv["DESC"]]} ) if str(wnv["LOCAL_ASN"]) != 
hnv["LOCAL_ASN"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_IN_ASN_NO_MATCH": [str(wnv["LOCAL_ASN"]), hnv["LOCAL_ASN"]]} + { + "DCNM_SRP_EBGP_IN_ASN_NO_MATCH": [ + str(wnv["LOCAL_ASN"]), + hnv["LOCAL_ASN"], + ] + } ) - if ( - str(wnv["ADVERTISE_HOST_ROUTE"]).lower() - != hnv["ADVERTISE_HOST_ROUTE"] - ): + if str(wnv["ADVERTISE_HOST_ROUTE"]).lower() != hnv["ADVERTISE_HOST_ROUTE"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_IN_ADV_HR_NO_MATCH": [str(wnv["ADVERTISE_HOST_ROUTE"]).lower(), hnv["ADVERTISE_HOST_ROUTE"]]} + { + "DCNM_SRP_EBGP_IN_ADV_HR_NO_MATCH": [ + str(wnv["ADVERTISE_HOST_ROUTE"]).lower(), + hnv["ADVERTISE_HOST_ROUTE"], + ] + } ) if str(wnv["ADMIN_STATE"]).lower() != hnv["ADMIN_STATE"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_IN_AS_NO_MATCH": [str(wnv["ADMIN_STATE"]).lower(), hnv["ADMIN_STATE"]]} + { + "DCNM_SRP_EBGP_IN_AS_NO_MATCH": [ + str(wnv["ADMIN_STATE"]).lower(), + hnv["ADMIN_STATE"], + ] + } ) if wnv["VRF_NAME"] != hnv["VRF_NAME"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_IN_PROF_VRF_NO_MATCH": [wnv["VRF_NAME"], hnv["VRF_NAME"]]} + { + "DCNM_SRP_EBGP_IN_PROF_VRF_NO_MATCH": [ + wnv["VRF_NAME"], + hnv["VRF_NAME"], + ] + } ) if want["routes"][0]["vrfName"] != have["routes"][0]["vrfName"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_IN_VRF_NO_MATCH": [want["routes"][0]["vrfName"], have["routes"][0]["vrfName"]]} + { + "DCNM_SRP_EBGP_IN_VRF_NO_MATCH": [ + want["routes"][0]["vrfName"], + have["routes"][0]["vrfName"], + ] + } ) if want["deploymentMode"] == "InterTenantFW": @@ -3115,7 +3283,12 @@ def dcnm_srp_compare_route_info(self, want, have): != have["routes"][1]["templateName"] ): mismatch_reasons.append( - {"DCNM_SRP_EBGP_OUT_TN_NO_MATCH": [want["routes"][1]["templateName"], have["routes"][1]["templateName"]]} + { + "DCNM_SRP_EBGP_OUT_TN_NO_MATCH": [ + want["routes"][1]["templateName"], + have["routes"][1]["templateName"], + ] + } ) wnv = want["routes"][1]["nvPairs"] @@ -3123,31 +3296,66 @@ def dcnm_srp_compare_route_info(self, want, have): if 
wnv["NEIGHBOR_IP"] != hnv["NEIGHBOR_IP"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_OUT_NIP4_NO_MATCH": [wnv["NEIGHBOR_IP"], hnv["NEIGHBOR_IP"]]} + { + "DCNM_SRP_EBGP_OUT_NIP4_NO_MATCH": [ + wnv["NEIGHBOR_IP"], + hnv["NEIGHBOR_IP"], + ] + } ) if wnv["LOOPBACK_IP"] != hnv["LOOPBACK_IP"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_OUT_LIP4_NO_MATCH": [wnv["LOOPBACK_IP"], hnv["LOOPBACK_IP"]]} + { + "DCNM_SRP_EBGP_OUT_LIP4_NO_MATCH": [ + wnv["LOOPBACK_IP"], + hnv["LOOPBACK_IP"], + ] + } ) if wnv["PEER_LOOPBACK_IP"] != hnv["PEER_LOOPBACK_IP"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_OUT_PLIP4_NO_MATCH": [wnv["PEER_LOOPBACK_IP"], hnv["PEER_LOOPBACK_IP"]]} + { + "DCNM_SRP_EBGP_OUT_PLIP4_NO_MATCH": [ + wnv["PEER_LOOPBACK_IP"], + hnv["PEER_LOOPBACK_IP"], + ] + } ) if wnv["NEIGHBOR_IPV6"] != hnv["NEIGHBOR_IPV6"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_OUT_NIP6_NO_MATCH": [wnv["NEIGHBOR_IPV6"], hnv["NEIGHBOR_IPV6"]]} + { + "DCNM_SRP_EBGP_OUT_NIP6_NO_MATCH": [ + wnv["NEIGHBOR_IPV6"], + hnv["NEIGHBOR_IPV6"], + ] + } ) if wnv["LOOPBACK_IPV6"] != hnv["LOOPBACK_IPV6"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_OUT_LIP6_NO_MATCH": [wnv["LOOPBACK_IPV6"], hnv["LOOPBACK_IPV6"]]} + { + "DCNM_SRP_EBGP_OUT_LIP6_NO_MATCH": [ + wnv["LOOPBACK_IPV6"], + hnv["LOOPBACK_IPV6"], + ] + } ) if wnv["PEER_LOOPBACK_IPV6"] != hnv["PEER_LOOPBACK_IPV6"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_OUT_PLIP6_NO_MATCH": [wnv["PEER_LOOPBACK_IPV6"], hnv["PEER_LOOPBACK_IPV6"]]} + { + "DCNM_SRP_EBGP_OUT_PLIP6_NO_MATCH": [ + wnv["PEER_LOOPBACK_IPV6"], + hnv["PEER_LOOPBACK_IPV6"], + ] + } ) if str(wnv["ROUTE_MAP_TAG"]) != hnv["ROUTE_MAP_TAG"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_OUT_RMT_NO_MATCH": [str(wnv["ROUTE_MAP_TAG"]), hnv["ROUTE_MAP_TAG"]]} + { + "DCNM_SRP_EBGP_OUT_RMT_NO_MATCH": [ + str(wnv["ROUTE_MAP_TAG"]), + hnv["ROUTE_MAP_TAG"], + ] + } ) if wnv["DESC"] != hnv["DESC"]: mismatch_reasons.append( @@ -3155,30 +3363,52 @@ def dcnm_srp_compare_route_info(self, want, have): ) if 
str(wnv["LOCAL_ASN"]) != hnv["LOCAL_ASN"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_OUT_ASN_NO_MATCH": [str(wnv["LOCAL_ASN"]), hnv["LOCAL_ASN"]]} + { + "DCNM_SRP_EBGP_OUT_ASN_NO_MATCH": [ + str(wnv["LOCAL_ASN"]), + hnv["LOCAL_ASN"], + ] + } ) if ( str(wnv["ADVERTISE_HOST_ROUTE"]).lower() != hnv["ADVERTISE_HOST_ROUTE"] ): mismatch_reasons.append( - {"DCNM_SRP_EBGP_OUT_ADV_HR_NO_MATCH": [str(wnv["ADVERTISE_HOST_ROUTE"]).lower(), hnv["ADVERTISE_HOST_ROUTE"]]} + { + "DCNM_SRP_EBGP_OUT_ADV_HR_NO_MATCH": [ + str(wnv["ADVERTISE_HOST_ROUTE"]).lower(), + hnv["ADVERTISE_HOST_ROUTE"], + ] + } ) if str(wnv["ADMIN_STATE"]).lower() != hnv["ADMIN_STATE"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_OUT_AS_NO_MATCH": [str(wnv["ADMIN_STATE"]).lower(), hnv["ADMIN_STATE"]]} + { + "DCNM_SRP_EBGP_OUT_AS_NO_MATCH": [ + str(wnv["ADMIN_STATE"]).lower(), + hnv["ADMIN_STATE"], + ] + } ) if wnv["VRF_NAME"] != hnv["VRF_NAME"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_OUT_PROF_VRF_NO_MATCH": [wnv["VRF_NAME"], hnv["VRF_NAME"]]} + { + "DCNM_SRP_EBGP_OUT_PROF_VRF_NO_MATCH": [ + wnv["VRF_NAME"], + hnv["VRF_NAME"], + ] + } ) - if ( - want["routes"][1]["vrfName"] - != have["routes"][1]["vrfName"] - ): + if want["routes"][1]["vrfName"] != have["routes"][1]["vrfName"]: mismatch_reasons.append( - {"DCNM_SRP_EBGP_OUT_VRF_NO_MATCH": [want["routes"][1]["vrfName"], have["routes"][1]["vrfName"]]} + { + "DCNM_SRP_EBGP_OUT_VRF_NO_MATCH": [ + want["routes"][1]["vrfName"], + have["routes"][1]["vrfName"], + ] + } ) if mismatch_reasons == []: @@ -3234,7 +3464,7 @@ def dcnm_srp_compare_route_peerings(self, srp): else: return ("DCNM_SRP_ADD_NEW", None, []) - def dcnm_srp_get_sno_list (self, have): + def dcnm_srp_get_sno_list(self, have): """ Routine to get the list of serial numbers from the given SRP @@ -3254,10 +3484,10 @@ def dcnm_srp_get_sno_list (self, have): if sw_status["switchSerialNumber"] not in sno_list: sno_list.append(sw_status["switchSerialNumber"]) else: - 
self.changed_dict[0]["debugs"].append({"HAVE W/O ATTACHS": have }) + self.changed_dict[0]["debugs"].append({"HAVE W/O ATTACHS": have}) return sno_list - def dcnm_srp_get_vlan_list (self, have): + def dcnm_srp_get_vlan_list(self, have): """ Routine to get the list of vlans from the given SRP @@ -3293,7 +3523,12 @@ def dcnm_srp_get_srp_deployment_status(self, srp, have, chk_deployed): deployed (bool): a flag indicating is the given SRP is deployed """ - path = self.paths["GET_SRP_DEPLOY_STATUS"].format(srp["fabricName"], srp["serviceNodeName"], srp["attachedFabricName"], srp["peeringName"]) + path = self.paths["GET_SRP_DEPLOY_STATUS"].format( + srp["fabricName"], + srp["serviceNodeName"], + srp["attachedFabricName"], + srp["peeringName"], + ) retries = 0 while retries < 30: retries += 1 @@ -3303,11 +3538,12 @@ def dcnm_srp_get_srp_deployment_status(self, srp, have, chk_deployed): self.dcnm_srp_check_for_errors_in_resp(resp) time.sleep(10) continue - elif resp["RETURN_CODE"] == 200 and resp.get("DATA") == []: + + if resp["RETURN_CODE"] == 200 and resp.get("DATA") == []: time.sleep(10) continue - else: - break + + break if resp: resp["RETRIES"] = retries @@ -3321,9 +3557,9 @@ def dcnm_srp_get_srp_deployment_status(self, srp, have, chk_deployed): and (resp.get("DATA", None) is not None) ): if chk_deployed: - check_list = self.dcnm_srp_get_vlan_list (have) + check_list = self.dcnm_srp_get_vlan_list(have) else: - check_list = self.dcnm_srp_get_sno_list (have) + check_list = self.dcnm_srp_get_sno_list(have) resp["check_list"] = check_list @@ -3347,9 +3583,7 @@ def dcnm_srp_get_srp_deployment_status(self, srp, have, chk_deployed): return resp, False, False, attach["attachState"].lower() elif attach["attachState"].lower() == "out-of-sync": srp_info = {} - self.dcnm_srp_combine_route_peerings( - srp, srp_info - ) + self.dcnm_srp_combine_route_peerings(srp, srp_info) for path in srp_info: self.dcnm_srp_deploy_srp(path, srp_info[path]) @@ -3394,7 +3628,9 @@ def 
dcnm_srp_get_diff_merge(self): elif rc == "DCNM_SRP_MERGE": # A srp exists and it needs to be updated self.changed_dict[0]["modified"].append(srp) - self.changed_dict[0]["debugs"].append({"PeeringName": srp["peeringName"], "REASONS": reasons}) + self.changed_dict[0]["debugs"].append( + {"PeeringName": srp["peeringName"], "REASONS": reasons} + ) self.diff_modify.append(srp) # Check the 'deploy' flag and decide if this srp is to be deployed @@ -3414,11 +3650,14 @@ def dcnm_srp_get_diff_merge(self): retries = 0 while retries < 30: retries += 1 - resp, retry, deployed, att_state = self.dcnm_srp_get_srp_deployment_status( - srp, have, True - ) - - if att_state == 'out-of-sync': + ( + resp, + retry, + deployed, + att_state, + ) = self.dcnm_srp_get_srp_deployment_status(srp, have, True) + + if att_state == "out-of-sync": if retries == 20: # There are some timing issues in DCNM and the final deployed state of a RP depends on the order of # deploying VRFs sand Networks on the switch. Sometimes due to timing issues an RP may get stuck in @@ -3429,8 +3668,8 @@ def dcnm_srp_get_diff_merge(self): if retry: time.sleep(10) continue - else: - break + + break if resp not in self.changed_dict[0]["debugs"]: resp["RETRIES"] = retries @@ -3440,9 +3679,9 @@ def dcnm_srp_get_diff_merge(self): # We deploy when self.deploy is True and: # 1. there are no changes due to this request(rc is DCNM_SRP_DONT_ADD), but the SRP is not deployed # 2. 
there are changes due to this request (rc is DCNM_SRP_MERGE) - if ( - (rc == "DCNM_SRP_DONT_ADD") and (deployed is False) - ) or (rc == "DCNM_SRP_MERGE"): + if ((rc == "DCNM_SRP_DONT_ADD") and (deployed is False)) or ( + rc == "DCNM_SRP_MERGE" + ): if srp["enabled"]: ditem = {} ditem["serviceNodeName"] = srp["serviceNodeName"] @@ -3477,9 +3716,7 @@ def dcnm_srp_get_diff_deleted(self): for snode in serv_nodes: if snode["name"] in processed_nodes: continue - srps = self.dcnm_srp_get_srp_info_with_service_node( - snode["name"] - ) + srps = self.dcnm_srp_get_srp_info_with_service_node(snode["name"]) if srps: self.diff_delete.extend(srps) self.changed_dict[0]["deleted"].extend(srps) @@ -3493,9 +3730,7 @@ def dcnm_srp_get_diff_deleted(self): match_srps = [] # If peering name is given, get the specific route peering if snode.get("name") != "": - srps = self.dcnm_srp_get_srp_info_from_dcnm( - snode, "PLAYBOOK" - ) + srps = self.dcnm_srp_get_srp_info_from_dcnm(snode, "PLAYBOOK") if srps != [] and srps not in self.diff_delete: match_srps = srps else: @@ -3542,9 +3777,7 @@ def dcnm_srp_get_diff_query(self): self.result["response"].append(resp) else: # peeringName not included - resp = self.dcnm_srp_get_srp_info_with_service_node( - srp["node_name"] - ) + resp = self.dcnm_srp_get_srp_info_with_service_node(srp["node_name"]) if resp != []: self.result["response"].extend(resp) @@ -3587,9 +3820,7 @@ def dcnm_srp_get_diff_overridden(self): (srp["peeringName"] == want["peeringName"]) and (srp["fabricName"] == want["fabricName"]) and (srp["serviceNodeName"] == want["serviceNodeName"]) - and ( - srp["attachedFabricName"] == want["attachedFabricName"] - ) + and (srp["attachedFabricName"] == want["attachedFabricName"]) ) ] if match_want == []: @@ -3616,9 +3847,16 @@ def dcnm_srp_create_srp(self, srp, command): """ if command == "POST": - path = self.paths["CREATE_SRP"].format(srp["fabricName"], srp["serviceNodeName"]) + path = self.paths["CREATE_SRP"].format( + srp["fabricName"], 
srp["serviceNodeName"] + ) else: - path = self.paths["UPDATE_SRP"].format(srp["fabricName"], srp["serviceNodeName"], srp["attachedFabricName"], srp["peeringName"]) + path = self.paths["UPDATE_SRP"].format( + srp["fabricName"], + srp["serviceNodeName"], + srp["attachedFabricName"], + srp["peeringName"], + ) json_payload = json.dumps(srp) @@ -3659,7 +3897,12 @@ def dcnm_srp_delete_srp(self, srp): """ # Delete the route peering - path = self.paths["DELETE_SRP"].format(srp["fabricName"], srp["serviceNodeName"], srp["attachedFabricName"], srp["peeringName"]) + path = self.paths["DELETE_SRP"].format( + srp["fabricName"], + srp["serviceNodeName"], + srp["attachedFabricName"], + srp["peeringName"], + ) srp["enabled"] = False srp["status"] = "NA" @@ -3680,9 +3923,11 @@ def dcnm_srp_attach_srp(self, srp): resp (dict): Response from DCNM server """ - path = self.paths["ATTACH_SRP"].format(srp["fabricName"], srp["serviceNodeName"], srp["attachedFabricName"]) + path = self.paths["ATTACH_SRP"].format( + srp["fabricName"], srp["serviceNodeName"], srp["attachedFabricName"] + ) - attach_payload = {"peeringNames" : [srp["peeringName"]]} + attach_payload = {"peeringNames": [srp["peeringName"]]} json_payload = json.dumps(attach_payload) resp = dcnm_send(self.module, "POST", path, json_payload) @@ -3719,17 +3964,17 @@ def dcnm_srp_attach_and_deploy_srp(self, srp): resp (dict): Response from DCNM server """ - fixed_path = self.paths["DEPLOY_SRP_PREFIX"].format(srp["fabricName"], srp["serviceNodeName"], srp["attachedFabricName"]) + fixed_path = self.paths["DEPLOY_SRP_PREFIX"].format( + srp["fabricName"], srp["serviceNodeName"], srp["attachedFabricName"] + ) detach_srp_info = {} - detach_srp_info = self.dcnm_srp_combine_route_peerings( - srp, detach_srp_info - ) + detach_srp_info = self.dcnm_srp_combine_route_peerings(srp, detach_srp_info) for path in detach_srp_info: - self.dcnm_srp_detach_srp (path, detach_srp_info[path]["peeringNames"]) + self.dcnm_srp_detach_srp(path, 
detach_srp_info[path]["peeringNames"]) time.sleep(10) - self.dcnm_srp_attach_srp (srp) + self.dcnm_srp_attach_srp(srp) time.sleep(10) self.dcnm_srp_deploy_srp(fixed_path, {"peeringNames": [srp["peeringName"]]}) @@ -3755,10 +4000,17 @@ def dcnm_srp_check_deployment_status(self, srp_list, final_state): att_state = "Unknown" while retries < 50: retries += 1 - resp, retry, deployed, att_state = self.dcnm_srp_get_srp_deployment_status(srp, srp, (final_state == "deployed")) + ( + resp, + retry, + deployed, + att_state, + ) = self.dcnm_srp_get_srp_deployment_status( + srp, srp, (final_state == "deployed") + ) if att_state == final_state: - break; + break if att_state == "pending": if (retries % 10) == 0: self.dcnm_srp_config_save_and_deploy() @@ -3772,11 +4024,21 @@ def dcnm_srp_check_deployment_status(self, srp_list, final_state): if (retries % 10) == 0: self.dcnm_srp_attach_and_deploy_srp(srp) time.sleep(30) - self.changed_dict[0]["debugs"].append({"PeeringName": srp["peeringName"], "State": att_state}) + self.changed_dict[0]["debugs"].append( + {"PeeringName": srp["peeringName"], "State": att_state} + ) # After all retries, if the SRP did not move to 'final_state' it is an error if att_state != final_state: # Note down the SRP to aid in debugging - self.module.fail_json (msg={"CHANGED": self.changed_dict[0], "FAILURE REASON": "SRP "+ srp["peeringName"] +" did not reach 'In-Sync' State", "Attach State" : att_state}) + self.module.fail_json( + msg={ + "CHANGED": self.changed_dict[0], + "FAILURE REASON": "SRP " + + srp["peeringName"] + + " did not reach 'In-Sync' State", + "Attach State": att_state, + } + ) def dcnm_srp_combine_route_peerings(self, srp, srp_info): @@ -3793,7 +4055,9 @@ def dcnm_srp_combine_route_peerings(self, srp, srp_info): srp_info(dict): A dict containing a list of combined peerings including the current one """ - path = self.paths["SRP_FIXED_PREFIX"].format(srp["fabricName"], srp["serviceNodeName"], srp["attachedFabricName"]) + path = 
self.paths["SRP_FIXED_PREFIX"].format( + srp["fabricName"], srp["serviceNodeName"], srp["attachedFabricName"] + ) if srp_info.get(path) is None: srp_info[path] = {"peeringNames": []} @@ -3813,7 +4077,9 @@ def dcnm_srp_config_save_and_deploy(self): resp (dict): Response from DCNM server """ - path = self.paths["SRP_CFG_SAVE_AND_DEPLOY"].format(self.module.params["fabric"]) + path = self.paths["SRP_CFG_SAVE_AND_DEPLOY"].format( + self.module.params["fabric"] + ) resp = dcnm_send(self.module, "POST", path, "") return resp @@ -3850,12 +4116,8 @@ def dcnm_srp_check_for_errors_in_resp(self, resp): and resp["DATA"]["error"].get("code") == "InvalidRequest" ): if ( - "not allowed" - not in resp["DATA"]["error"].get("detail", "") - ) and ( - "Deployment" - not in resp["DATA"]["error"].get("detail", "") - ): + "not allowed" not in resp["DATA"]["error"].get("detail", "") + ) and ("Deployment" not in resp["DATA"]["error"].get("detail", "")): # For the case of "InvalidRequest", check if it is because of deployment operation. If not, we should # reset the connection because the token may have expired in the middle of transaction. 
dcnm_reset_connection(self.module) @@ -3871,15 +4133,16 @@ def dcnm_srp_check_for_errors_in_resp(self, resp): isinstance(resp["DATA"]["error"], dict) and resp["DATA"]["error"].get("code") == "ProcessingError" ): - if ( - "is in use already" - in resp["DATA"]["error"].get("detail", "") + if "is in use already" in resp["DATA"]["error"].get( + "detail", "" ): rc = "in_use_error" - resp["VLANS"] = re.findall(r'\d+', resp["DATA"]["error"].get("detail", "")) + resp["VLANS"] = re.findall( + r"\d+", resp["DATA"]["error"].get("detail", "") + ) return rc - def dcnm_srp_get_deployed_srp_list (self, diff_deploy): + def dcnm_srp_get_deployed_srp_list(self, diff_deploy): """ Routine to match SRPs fromself.diff_create and self.diff_modify and return a list of all matching SRPs @@ -3904,9 +4167,7 @@ def dcnm_srp_get_deployed_srp_list (self, diff_deploy): (srp["peeringName"] == item["peeringName"]) and (srp["fabricName"] == item["fabricName"]) and (srp["serviceNodeName"] == item["serviceNodeName"]) - and ( - srp["attachedFabricName"] == item["attachedFabricName"] - ) + and (srp["attachedFabricName"] == item["attachedFabricName"]) ) ] if match_srp != []: @@ -3921,9 +4182,7 @@ def dcnm_srp_get_deployed_srp_list (self, diff_deploy): (srp["peeringName"] == item["peeringName"]) and (srp["fabricName"] == item["fabricName"]) and (srp["serviceNodeName"] == item["serviceNodeName"]) - and ( - srp["attachedFabricName"] == item["attachedFabricName"] - ) + and (srp["attachedFabricName"] == item["attachedFabricName"]) ) ] if match_srp != []: @@ -3943,9 +4202,7 @@ def dcnm_srp_get_deployed_srp_list (self, diff_deploy): (srp["peeringName"] == item["peeringName"]) and (srp["fabricName"] == item["fabricName"]) and (srp["serviceNodeName"] == item["serviceNodeName"]) - and ( - srp["attachedFabricName"] == item["attachedFabricName"] - ) + and (srp["attachedFabricName"] == item["attachedFabricName"]) ) ] if match_srp != []: @@ -3986,37 +4243,36 @@ def dcnm_srp_send_message_to_dcnm(self): # would have 
detached it explicitly. So we need to attach it explicitly again if attach_flag: attach_flag = False - self.dcnm_srp_attach_srp (srp) + self.dcnm_srp_attach_srp(srp) break - else: - # We sometimes see "UserUnauthorized" errors while transacting with DCNM server. Suggested remedy is to - # logout and login again. We will do the logout from here and expect the login to happen again after this - # from the connection module - rc = self.dcnm_srp_check_for_errors_in_resp(resp) - if rc == "in_use_error": - # We may see this if SRPs use a vlan id already in use. In that case update the SRP with a new - # allocated VLAN id. - for net in srp["serviceNetworks"]: - if str(net["vlanId"]) in resp["VLANS"]: - net["vlanId"] = 0 - # Since we have zeroed out the vlans which errored, allocate new IDs - self.dcnm_srp_allocate_vlan_id(self.module.params["fabric"], srp) - if srp["enabled"]: - attach_flag = True - - # There may be a temporary issue on the server. so we should try again. In case - # of create or modify, the peering may have been created/updated, but the error may - # be due to the attach. So check if the peering is created and if attach flag is set. - # If so then try attaching the peering and do not try to recreate - get_resp = self.dcnm_srp_get_srp_info_from_dcnm( - srp, "PAYLOAD" - ) - if get_resp != []: - # Since the peering is already created, use PUT to update the peering again with - # the same payload - command = "PUT" - time.sleep(10) - continue + + # We sometimes see "UserUnauthorized" errors while transacting with DCNM server. Suggested remedy is to + # logout and login again. We will do the logout from here and expect the login to happen again after this + # from the connection module + rc = self.dcnm_srp_check_for_errors_in_resp(resp) + if rc == "in_use_error": + # We may see this if SRPs use a vlan id already in use. In that case update the SRP with a new + # allocated VLAN id. 
+ for net in srp["serviceNetworks"]: + if str(net["vlanId"]) in resp["VLANS"]: + net["vlanId"] = 0 + # Since we have zeroed out the vlans which errored, allocate new IDs + self.dcnm_srp_allocate_vlan_id(self.module.params["fabric"], srp) + if srp["enabled"]: + attach_flag = True + + # There may be a temporary issue on the server. so we should try again. In case + # of create or modify, the peering may have been created/updated, but the error may + # be due to the attach. So check if the peering is created and if attach flag is set. + # If so then try attaching the peering and do not try to recreate + get_resp = self.dcnm_srp_get_srp_info_from_dcnm(srp, "PAYLOAD") + if get_resp != []: + # Since the peering is already created, use PUT to update the peering again with + # the same payload + command = "PUT" + time.sleep(10) + continue + resp["RETRIES"] = retries self.result["response"].append(resp) if resp and resp.get("RETURN_CODE") != 200: @@ -4034,21 +4290,23 @@ def dcnm_srp_send_message_to_dcnm(self): # If attach_flag is set, try to attach the SRP. This is required in case of in_use_error, because DCNM # would have detached it explicitly. So we need to attach it explicitly again if srp["enabled"]: - att_resp = self.dcnm_srp_attach_srp (srp) + att_resp = self.dcnm_srp_attach_srp(srp) if att_resp["RETURN_CODE"] == 200: break - if (resp and resp.get("RETURN_CODE") != 200) or (att_resp and att_resp["RETURN_CODE"] != 200): + if (resp and resp.get("RETURN_CODE") != 200) or ( + att_resp and att_resp["RETURN_CODE"] != 200 + ): # We sometimes see "UserUnauthorized" errors while transacting with DCNM server. Suggested remedy is to # logout and login again. 
We will do the logout from here and expect the login to happen again after this # from the connection module if resp: rc1 = self.dcnm_srp_check_for_errors_in_resp(resp) else: - rc1 = ' ' + rc1 = " " if att_resp: rc2 = self.dcnm_srp_check_for_errors_in_resp(att_resp) else: - rc2 = ' ' + rc2 = " " if rc1 == "in_use_error": chk_resp = resp @@ -4062,7 +4320,9 @@ def dcnm_srp_send_message_to_dcnm(self): if str(net["vlanId"]) in chk_resp["VLANS"]: net["vlanId"] = 0 # Since we have zeroed out the vlans which errored, allocate new IDs - self.dcnm_srp_allocate_vlan_id(self.module.params["fabric"], srp) + self.dcnm_srp_allocate_vlan_id( + self.module.params["fabric"], srp + ) time.sleep(10) continue resp["RETRIES"] = retries @@ -4104,9 +4364,9 @@ def dcnm_srp_send_message_to_dcnm(self): # We sometimes see "UserUnauthorized" errors while transacting with DCNM server. Suggested remedy is to # logout and login again. We will do the logout from here and expect the login to happen again after this # from the connection module - resp["METHOD"] = '' + resp["METHOD"] = "" rc = self.dcnm_srp_check_for_errors_in_resp(resp) - resp["METHOD"] = 'DELETE' + resp["METHOD"] = "DELETE" if rc == "in_use_error": # We may see this if SRPs use a vlan id already in use. In such a case delete the SRP directly @@ -4129,7 +4389,7 @@ def dcnm_srp_send_message_to_dcnm(self): retries = 0 # Check if we have marked the SRP for no deploy. 
If yes skip it - if delete_srp_info[path].get("deploy", ' ') is False: + if delete_srp_info[path].get("deploy", " ") is False: continue while retries < 30: @@ -4160,7 +4420,7 @@ def dcnm_srp_send_message_to_dcnm(self): self.module.fail_json(msg=resp) if delete_flag is True: - self.dcnm_srp_check_deployment_status (self.diff_delete, "na") + self.dcnm_srp_check_deployment_status(self.diff_delete, "na") for srp in self.diff_delete: retries = 0 @@ -4171,35 +4431,32 @@ def dcnm_srp_send_message_to_dcnm(self): if (resp is not None) and (resp.get("RETURN_CODE") == 200): delete_flag = True break - else: - # We sometimes see "UserUnauthorized" errors while transacting with DCNM server. Suggested remedy is to - # logout and login again. We will do the logout from here and expect the login to happen again after this - # from the connection module - self.dcnm_srp_check_for_errors_in_resp(resp) - if retries == 20: - # We failed to delete even after all retries. Try a config save and deploy which - # may pull out of the situation + # We sometimes see "UserUnauthorized" errors while transacting with DCNM server. Suggested remedy is to + # logout and login again. We will do the logout from here and expect the login to happen again after this + # from the connection module + self.dcnm_srp_check_for_errors_in_resp(resp) + + if retries == 20: + # We failed to delete even after all retries. Try a config save and deploy which + # may pull out of the situation + + resp = self.dcnm_srp_config_save_and_deploy() + self.result["response"].append(resp) + elif deploy_in_prog is False: + # We will require a deploy here. 
Otherwise we may see delete errors in some cases + # indicating that a deploy operation is still in progress and peering cannot be deleted + srp_info = {} + srp_info = self.dcnm_srp_combine_route_peerings(srp, srp_info) + for path in srp_info: + resp = self.dcnm_srp_deploy_srp(path, srp_info[path]) + if resp.get("RETURN_CODE") == 200: + deploy_in_prog = True + else: + self.dcnm_srp_check_for_errors_in_resp(resp) + time.sleep(10) + continue - resp = self.dcnm_srp_config_save_and_deploy() - self.result["response"].append(resp) - elif deploy_in_prog is False: - # We will require a deploy here. Otherwise we may see delete errors in some cases - # indicating that a deploy operation is still in progress and peering cannot be deleted - srp_info = {} - srp_info = self.dcnm_srp_combine_route_peerings( - srp, srp_info - ) - for path in srp_info: - resp = self.dcnm_srp_deploy_srp(path, srp_info[path]) - if resp.get("RETURN_CODE") == 200: - deploy_in_prog = True - else: - self.dcnm_srp_check_for_errors_in_resp( - resp - ) - time.sleep(10) - continue if resp is not None: resp["RETRIES"] = retries self.result["response"].append(resp) @@ -4248,10 +4505,10 @@ def dcnm_srp_send_message_to_dcnm(self): if deploy_flag: # We need the SRPs from create and modify list to check for deployment status. Collect them into new list - self.deployed_srps = self.dcnm_srp_get_deployed_srp_list (self.diff_deploy) + self.deployed_srps = self.dcnm_srp_get_deployed_srp_list(self.diff_deploy) # Ensure all the route peerings are properly deployed before returning. 
- self.dcnm_srp_check_deployment_status (self.deployed_srps, "deployed") + self.dcnm_srp_check_deployment_status(self.deployed_srps, "deployed") self.result["changed"] = ( create_flag or modify_flag or delete_flag or deploy_flag @@ -4260,24 +4517,21 @@ def dcnm_srp_send_message_to_dcnm(self): def main(): - """ main entry point for module execution - """ + """main entry point for module execution""" element_spec = dict( fabric=dict(required=True, type="str"), service_fabric=dict(required=True, type="str"), - config=dict(required=False, type="list"), + config=dict(required=False, type="list", elements="dict"), state=dict( type="str", default="merged", choices=["merged", "deleted", "replaced", "query", "overridden"], ), deploy=dict(required=False, type="bool", default=True), - attach=dict(required=False, type="bool"), + attach=dict(required=False, type="bool", default=True), ) - module = AnsibleModule( - argument_spec=element_spec, supports_check_mode=True - ) + module = AnsibleModule(argument_spec=element_spec, supports_check_mode=True) dcnm_srp = DcnmServiceRoutePeering(module) @@ -4297,16 +4551,14 @@ def main(): if not dcnm_srp.config: if state == "merged" or state == "replaced" or state == "query": module.fail_json( - msg="'config' element is mandatory for state '{}', given = '{}'".format( + msg="'config' element is mandatory for state '{0}', given = '{1}'".format( state, dcnm_srp.config ) ) dcnm_srp.dcnm_srp_validate_input() - if (module.params["state"] != "query") and ( - module.params["state"] != "deleted" - ): + if (module.params["state"] != "query") and (module.params["state"] != "deleted"): dcnm_srp.dcnm_srp_get_want() dcnm_srp.dcnm_srp_get_have() @@ -4317,9 +4569,7 @@ def main(): dcnm_srp.dcnm_srp_update_want() - if (module.params["state"] == "merged") or ( - module.params["state"] == "replaced" - ): + if (module.params["state"] == "merged") or (module.params["state"] == "replaced"): dcnm_srp.dcnm_srp_get_diff_merge() if module.params["state"] == "deleted": 
@@ -4346,5 +4596,6 @@ def main(): dcnm_srp.result["EndTime"] = datetime.now().strftime("%H:%M:%S") module.exit_json(**dcnm_srp.result) + if __name__ == "__main__": main() diff --git a/plugins/modules/dcnm_template.py b/plugins/modules/dcnm_template.py index 98099e28d..059b1b8e4 100644 --- a/plugins/modules/dcnm_template.py +++ b/plugins/modules/dcnm_template.py @@ -1,6 +1,6 @@ #!/usr/bin/python # -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,7 +13,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import absolute_import, division, print_function +__metaclass__ = type __author__ = "Mallik Mudigonda" DOCUMENTATION = """ @@ -178,7 +180,7 @@ from ansible_collections.cisco.dcnm.plugins.module_utils.network.dcnm.dcnm import ( dcnm_send, validate_list_of_dicts, - dcnm_version_supported + dcnm_version_supported, ) @@ -186,21 +188,21 @@ class DcnmTemplate: dcnm_template_paths = { 11: { - "TEMP_VALIDATE": "/rest/config/templates/validate", - "TEMP_GET_SWITCHES": "/rest/control/policies/switches?serialNumber={}", - "TEMP_GET_SW_ROLES": "/rest/control/switches/roles", - "TEMPLATE": "/rest/config/templates/template", - "TEMP_DELETE_BULK": "/rest/config/templates/delete/bulk", - "TEMPLATE_WITH_NAME": "/rest/config/templates/{}" - }, + "TEMP_VALIDATE": "/rest/config/templates/validate", + "TEMP_GET_SWITCHES": "/rest/control/policies/switches?serialNumber={}", + "TEMP_GET_SW_ROLES": "/rest/control/switches/roles", + "TEMPLATE": "/rest/config/templates/template", + "TEMP_DELETE_BULK": "/rest/config/templates/delete/bulk", + "TEMPLATE_WITH_NAME": "/rest/config/templates/{}", + }, 12: { - "TEMP_VALIDATE": 
"/appcenter/cisco/ndfc/api/v1/configtemplate/rest/config/templates/validate", - "TEMP_GET_SWITCHES": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/switches?serialNumber={}", - "TEMP_GET_SW_ROLES": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/switches/roles", - "TEMPLATE": "/appcenter/cisco/ndfc/api/v1/configtemplate/rest/config/templates/template", - "TEMP_DELETE_BULK": "/appcenter/cisco/ndfc/api/v1/configtemplate/rest/config/templates/delete/bulk", - "TEMPLATE_WITH_NAME": "/appcenter/cisco/ndfc/api/v1/configtemplate/rest/config/templates/{}" - } + "TEMP_VALIDATE": "/appcenter/cisco/ndfc/api/v1/configtemplate/rest/config/templates/validate", + "TEMP_GET_SWITCHES": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/switches?serialNumber={}", + "TEMP_GET_SW_ROLES": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/switches/roles", + "TEMPLATE": "/appcenter/cisco/ndfc/api/v1/configtemplate/rest/config/templates/template", + "TEMP_DELETE_BULK": "/appcenter/cisco/ndfc/api/v1/configtemplate/rest/config/templates/delete/bulk", + "TEMPLATE_WITH_NAME": "/appcenter/cisco/ndfc/api/v1/configtemplate/rest/config/templates/{}", + }, } def __init__(self, module): @@ -216,9 +218,7 @@ def __init__(self, module): self.valid_fail = [] self.template_info = [] self.fd = None - self.changed_dict = [ - {"merged": [], "deleted": [], "query": [], "failed": []} - ] + self.changed_dict = [{"merged": [], "deleted": [], "query": [], "failed": []}] self.dcnm_version = dcnm_version_supported(self.module) @@ -237,7 +237,9 @@ def log_msg(self, msg): def dcnm_template_validate_input(self): if self.config is None: - self.module.fail_json(msg="config: parameter is required and cannot be empty") + self.module.fail_json( + msg="config: parameter is required and cannot be empty" + ) if self.module.params["state"] == "merged": template_spec = dict( @@ -256,7 +258,7 @@ def dcnm_template_validate_input(self): self.config, template_spec ) if invalid_params: - mesg = "Invalid 
parameters in playbook: {}".format(invalid_params) + mesg = "Invalid parameters in playbook: {0}".format(invalid_params) self.module.fail_json(msg=mesg) self.template_info.extend(template_info) @@ -280,10 +282,12 @@ def dcnm_template_get_template_payload(self, ditem): if self.module.params["state"] == "merged": - if (("template variables" not in ditem['content']) and ("template content" not in ditem["content"])): - std_cont = "##template properties\nname = __TEMPLATE_NAME;\ndescription = __DESCRIPTION;\ntags = __TAGS;\nuserDefined = true;\nsupportedPlatforms = All;\ntemplateType = POLICY;\ntemplateSubType = DEVICE;\ncontentType = TEMPLATE_CLI;\nimplements = implements;\ndependencies = ;\npublished = false;\n##\n##template variables\n##\n##template content\n" + if ("template variables" not in ditem["content"]) and ( + "template content" not in ditem["content"] + ): + std_cont = "##template properties\nname = __TEMPLATE_NAME;\ndescription = __DESCRIPTION;\ntags = __TAGS;\nuserDefined = true;\nsupportedPlatforms = All;\ntemplateType = POLICY;\ntemplateSubType = DEVICE;\ncontentType = TEMPLATE_CLI;\nimplements = implements;\ndependencies = ;\npublished = false;\n##\n##template variables\n##\n##template content\n" # noqa else: - std_cont = "##template properties\nname = __TEMPLATE_NAME;\ndescription = __DESCRIPTION;\ntags = __TAGS;\nuserDefined = true;\nsupportedPlatforms = All;\ntemplateType = POLICY;\ntemplateSubType = DEVICE;\ncontentType = TEMPLATE_CLI;\nimplements = implements;\ndependencies = ;\npublished = false;\n" + std_cont = "##template properties\nname = __TEMPLATE_NAME;\ndescription = __DESCRIPTION;\ntags = __TAGS;\nuserDefined = true;\nsupportedPlatforms = All;\ntemplateType = POLICY;\ntemplateSubType = DEVICE;\ncontentType = TEMPLATE_CLI;\nimplements = implements;\ndependencies = ;\npublished = false;\n" # noqa template_payload = {} @@ -319,9 +323,7 @@ def dcnm_template_compare_template(self, template): # have must be updated. 
match_pb = [ - t - for t in self.pb_input - if template["template_name"] == t["name"] + t for t in self.pb_input if template["template_name"] == t["name"] ][0] if match_pb: @@ -376,9 +378,7 @@ def dcnm_template_validate_template(self, template): path = self.paths["TEMP_VALIDATE"] - resp = dcnm_send( - self.module, "POST", path, template["content"], "text" - ) + resp = dcnm_send(self.module, "POST", path, template["content"], "text") if resp and resp["RETURN_CODE"] == 200 and resp["MESSAGE"] == "OK": # DATA may have multiple dicts with different reports. Check all reports and ignore warnings. @@ -386,14 +386,17 @@ def dcnm_template_validate_template(self, template): # resp['DATA'] may be a list in case of templates with no parameters. But for templates # with parameters resp['DATA'] will be a dict directly with 'status' as 'Template Validation Successful' - if isinstance(resp['DATA'], list): + if isinstance(resp["DATA"], list): for d in resp["DATA"]: - if d.get("reportItemType", ' ').lower() == "error": + if d.get("reportItemType", " ").lower() == "error": self.result["response"].append(resp) return 0 return resp["RETURN_CODE"] - elif isinstance(resp['DATA'], dict): - if resp['DATA'].get("status", ' ').lower() != "template validation successful": + elif isinstance(resp["DATA"], dict): + if ( + resp["DATA"].get("status", " ").lower() + != "template validation successful" + ): self.result["response"].append(resp) return 0 return resp["RETURN_CODE"] @@ -421,12 +424,12 @@ def dcnm_template_get_policy_list(self, snos, tlist): if policies.get(p["templateName"], None) is None: policies[p["templateName"]] = {} policies[p["templateName"]][p["policyId"]] = {} - policies[p["templateName"]][p["policyId"]][ + policies[p["templateName"]][p["policyId"]]["fabricName"] = p[ "fabricName" - ] = p["fabricName"] - policies[p["templateName"]][p["policyId"]][ + ] + policies[p["templateName"]][p["policyId"]]["serialNumber"] = p[ "serialNumber" - ] = p["serialNumber"] + ] return policies @@ 
-462,12 +465,7 @@ def dcnm_template_get_tlist_from_resp(self, resp): # Get the list of templates not deleted because they are in use. - tstr = ( - resp["DATA"] - .split("not deleted:")[1] - .replace("[", "") - .replace("]", "") - ) + tstr = resp["DATA"].split("not deleted:")[1].replace("[", "").replace("]", "") tstr = tstr.replace(" ", "") template_list = tstr.split(",") @@ -597,9 +595,7 @@ def dcnm_template_get_diff_deleted(self): for template in self.want: # Check if the template is present. If not ignore the request - match_temp = [ - t for t in self.have if template["name"] == t["name"] - ] + match_temp = [t for t in self.have if template["name"] == t["name"]] if match_temp: del_payload["fabTemplate"].append(template["name"]) @@ -640,9 +636,7 @@ def dcnm_template_send_message_to_dcnm(self): # First process delete list if self.diff_delete: - delete_flag = self.dcnm_template_delete_template( - self.diff_delete[0] - ) + delete_flag = self.dcnm_template_delete_template(self.diff_delete[0]) for template in self.diff_create: resp = self.dcnm_template_create_template(template) @@ -650,14 +644,14 @@ def dcnm_template_send_message_to_dcnm(self): resp = resp[0] if resp and resp["RETURN_CODE"] == 200: create_flag = True - if resp and resp['RETURN_CODE'] >= 400: + if resp and resp["RETURN_CODE"] >= 400: self.module.fail_json(msg=resp) self.result["changed"] = delete_flag or create_flag def dcnm_template_build_content(self, content, name, desc, tags, type): - std_cont = "##template properties\nname = __TEMPLATE_NAME;\ndescription = __DESCRIPTION;\ntags = __TAGS;\nuserDefined = true;\nsupportedPlatforms = All;\ntemplateType = POLICY;\ntemplateSubType = DEVICE;\ncontentType = TEMPLATE_CLI;\nimplements = implements;\ndependencies = ;\npublished = false;\n##\n##template content\n" + std_cont = "##template properties\nname = __TEMPLATE_NAME;\ndescription = __DESCRIPTION;\ntags = __TAGS;\nuserDefined = true;\nsupportedPlatforms = All;\ntemplateType = POLICY;\ntemplateSubType = 
DEVICE;\ncontentType = TEMPLATE_CLI;\nimplements = implements;\ndependencies = ;\npublished = false;\n##\n##template content\n" # noqa std_cont = std_cont.replace("__TEMPLATE_NAME", name) std_cont = std_cont.replace("__DESCRIPTION", desc) @@ -686,10 +680,9 @@ def dcnm_template_copy_config(self): def main(): - """ main entry point for module execution - """ + """main entry point for module execution""" element_spec = dict( - config=dict(required=True, type="list", elements='dict'), + config=dict(required=True, type="list", elements="dict"), state=dict( type="str", default="merged", @@ -697,9 +690,7 @@ def main(): ), ) - module = AnsibleModule( - argument_spec=element_spec, supports_check_mode=True - ) + module = AnsibleModule(argument_spec=element_spec, supports_check_mode=True) dcnm_template = DcnmTemplate(module) diff --git a/plugins/modules/dcnm_vrf.py b/plugins/modules/dcnm_vrf.py index 9b66b9db4..ef3add483 100644 --- a/plugins/modules/dcnm_vrf.py +++ b/plugins/modules/dcnm_vrf.py @@ -1,6 +1,6 @@ #!/usr/bin/python # -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import absolute_import, division, print_function -__author__ = "Shrishail Kariyappanavar, Karthik Babu Harichandra Babu, Praveen Ramoorthy" +__metaclass__ = type +__author__ = ( + "Shrishail Kariyappanavar, Karthik Babu Harichandra Babu, Praveen Ramoorthy" +) -DOCUMENTATION = ''' +DOCUMENTATION = """ --- module: dcnm_vrf short_description: Add and remove VRFs from a DCNM managed VXLAN fabric. 
@@ -144,9 +148,9 @@ - Global knob to control whether to deploy the attachment type: bool default: true -''' +""" -EXAMPLES = ''' +EXAMPLES = """ # This module supports the following states: # # Merged: @@ -336,49 +340,56 @@ config: - vrf_name: ansible-vrf-r1 - vrf_name: ansible-vrf-r2 -''' +""" import json import time import copy import ast import re -from ansible_collections.cisco.dcnm.plugins.module_utils.network.dcnm.dcnm import \ - get_fabric_inventory_details, dcnm_send, validate_list_of_dicts, \ - dcnm_get_ip_addr_info, get_ip_sn_dict, get_fabric_details, get_ip_sn_fabric_dict, \ - dcnm_version_supported, dcnm_get_url +from ansible_collections.cisco.dcnm.plugins.module_utils.network.dcnm.dcnm import ( + get_fabric_inventory_details, + dcnm_send, + validate_list_of_dicts, + dcnm_get_ip_addr_info, + get_ip_sn_dict, + get_fabric_details, + get_ip_sn_fabric_dict, + dcnm_version_supported, + dcnm_get_url, +) from ansible.module_utils.basic import AnsibleModule class DcnmVrf: - dcnm_vrf_paths={ + dcnm_vrf_paths = { 11: { - "GET_VRF": "/rest/top-down/fabrics/{}/vrfs", - "GET_VRF_ATTACH": "/rest/top-down/fabrics/{}/vrfs/attachments?vrf-names={}", - "GET_VRF_SWITCH": "/rest/top-down/fabrics/{}/vrfs/switches?vrf-names={}&serial-numbers={}", - "GET_VRF_ID": "/rest/managed-pool/fabrics/{}/partitions/ids", - "GET_VLAN": "/rest/resource-manager/vlan/{}?vlanUsageType=TOP_DOWN_VRF_VLAN" - }, + "GET_VRF": "/rest/top-down/fabrics/{}/vrfs", + "GET_VRF_ATTACH": "/rest/top-down/fabrics/{}/vrfs/attachments?vrf-names={}", + "GET_VRF_SWITCH": "/rest/top-down/fabrics/{}/vrfs/switches?vrf-names={}&serial-numbers={}", + "GET_VRF_ID": "/rest/managed-pool/fabrics/{}/partitions/ids", + "GET_VLAN": "/rest/resource-manager/vlan/{}?vlanUsageType=TOP_DOWN_VRF_VLAN", + }, 12: { - "GET_VRF": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/vrfs", - "GET_VRF_ATTACH": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/vrfs/attachments?vrf-names={}", - "GET_VRF_SWITCH": 
"/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/vrfs/switches?vrf-names={}&serial-numbers={}", - "GET_VRF_ID": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/top-down/fabrics/{}/vrfinfo", - "GET_VLAN": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/vlan/{}?vlanUsageType=TOP_DOWN_VRF_VLAN" - } + "GET_VRF": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/vrfs", + "GET_VRF_ATTACH": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/vrfs/attachments?vrf-names={}", + "GET_VRF_SWITCH": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/vrfs/switches?vrf-names={}&serial-numbers={}", + "GET_VRF_ID": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/top-down/fabrics/{}/vrfinfo", + "GET_VLAN": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/vlan/{}?vlanUsageType=TOP_DOWN_VRF_VLAN", + }, } def __init__(self, module): self.module = module self.params = module.params - self.fabric = module.params['fabric'] - self.config = copy.deepcopy(module.params.get('config')) + self.fabric = module.params["fabric"] + self.config = copy.deepcopy(module.params.get("config")) self.check_mode = False self.vrf_ext = False - self.role = '' - self.serial = '' + self.role = "" + self.serial = "" self.have_create = [] self.want_create = [] self.diff_create = [] @@ -410,18 +421,14 @@ def __init__(self, module): self.inventory_data = get_fabric_inventory_details(self.module, self.fabric) self.ip_sn, self.hn_sn = get_ip_sn_dict(self.inventory_data) self.fabric_data = get_fabric_details(self.module, self.fabric) - self.fabric_type = self.fabric_data.get('fabricType') + self.fabric_type = self.fabric_data.get("fabricType") self.ip_fab, self.sn_fab = get_ip_sn_fabric_dict(self.inventory_data) if self.dcnm_version > 12: self.paths = self.dcnm_vrf_paths[12] else: self.paths = self.dcnm_vrf_paths[self.dcnm_version] - self.result = dict( - changed=False, - diff=[], - response=[] - ) + self.result = dict(changed=False, diff=[], 
response=[]) self.failed_to_rollback = False self.WAIT_TIME_FOR_DELETE_LOOP = 5 # in seconds @@ -438,48 +445,83 @@ def diff_for_attach_deploy(self, want_a, have_a): found = False if have_a: for have in have_a: - if want['serialNumber'] == have['serialNumber']: - if want['extensionValues'] != "" and have['extensionValues'] != "": - want_ext_values = want['extensionValues'] + if want["serialNumber"] == have["serialNumber"]: + if ( + want["extensionValues"] != "" + and have["extensionValues"] != "" + ): + want_ext_values = want["extensionValues"] want_ext_values = ast.literal_eval(want_ext_values) - have_ext_values = have['extensionValues'] + have_ext_values = have["extensionValues"] have_ext_values = ast.literal_eval(have_ext_values) - want_e = ast.literal_eval(want_ext_values['VRF_LITE_CONN']) - have_e = ast.literal_eval(have_ext_values['VRF_LITE_CONN']) - - if want_e['VRF_LITE_CONN'][0]['IF_NAME'] == have_e['VRF_LITE_CONN'][0]['IF_NAME']: - if want_e['VRF_LITE_CONN'][0]['DOT1Q_ID'] == have_e['VRF_LITE_CONN'][0]['DOT1Q_ID']: - if want_e['VRF_LITE_CONN'][0]['IP_MASK'] == have_e['VRF_LITE_CONN'][0]['IP_MASK']: - if want_e['VRF_LITE_CONN'][0]['NEIGHBOR_IP'] == \ - have_e['VRF_LITE_CONN'][0]['NEIGHBOR_IP']: - if want_e['VRF_LITE_CONN'][0]['IPV6_MASK'] == \ - have_e['VRF_LITE_CONN'][0]['IPV6_MASK']: - if want_e['VRF_LITE_CONN'][0]['IPV6_NEIGHBOR'] == \ - have_e['VRF_LITE_CONN'][0]['IPV6_NEIGHBOR']: - if want_e['VRF_LITE_CONN'][0]['PEER_VRF_NAME'] == \ - have_e['VRF_LITE_CONN'][0]['PEER_VRF_NAME']: + want_e = ast.literal_eval(want_ext_values["VRF_LITE_CONN"]) + have_e = ast.literal_eval(have_ext_values["VRF_LITE_CONN"]) + + if ( + want_e["VRF_LITE_CONN"][0]["IF_NAME"] + == have_e["VRF_LITE_CONN"][0]["IF_NAME"] + ): + if ( + want_e["VRF_LITE_CONN"][0]["DOT1Q_ID"] + == have_e["VRF_LITE_CONN"][0]["DOT1Q_ID"] + ): + if ( + want_e["VRF_LITE_CONN"][0]["IP_MASK"] + == have_e["VRF_LITE_CONN"][0]["IP_MASK"] + ): + if ( + want_e["VRF_LITE_CONN"][0]["NEIGHBOR_IP"] + == 
have_e["VRF_LITE_CONN"][0]["NEIGHBOR_IP"] + ): + if ( + want_e["VRF_LITE_CONN"][0]["IPV6_MASK"] + == have_e["VRF_LITE_CONN"][0][ + "IPV6_MASK" + ] + ): + if ( + want_e["VRF_LITE_CONN"][0][ + "IPV6_NEIGHBOR" + ] + == have_e["VRF_LITE_CONN"][0][ + "IPV6_NEIGHBOR" + ] + ): + if ( + want_e["VRF_LITE_CONN"][0][ + "PEER_VRF_NAME" + ] + == have_e["VRF_LITE_CONN"][0][ + "PEER_VRF_NAME" + ] + ): found = True - elif want['extensionValues'] != "" or have['extensionValues'] != "": + elif ( + want["extensionValues"] != "" + or have["extensionValues"] != "" + ): found = False else: found = True # When the attachment is to be detached and undeployed, ignore any changes # to the attach section in the want(i.e in the playbook). - if want.get('isAttached') is not None: - if bool(have['isAttached']) is not bool(want['isAttached']): - del want['isAttached'] + if want.get("isAttached") is not None: + if bool(have["isAttached"]) is not bool( + want["isAttached"] + ): + del want["isAttached"] attach_list.append(want) continue - if bool(have['deployment']) is not bool(want['deployment']): + if bool(have["deployment"]) is not bool(want["deployment"]): dep_vrf = True if not found: - if bool(want['deployment']): - del want['isAttached'] + if bool(want["deployment"]): + del want["isAttached"] attach_list.append(want) return attach_list, dep_vrf @@ -490,77 +532,103 @@ def update_attach_params(self, attach, vrf_name, deploy, vlanId): return {} serial = "" - attach['ip_address'] = dcnm_get_ip_addr_info(self.module, attach['ip_address'], None, None) + attach["ip_address"] = dcnm_get_ip_addr_info( + self.module, attach["ip_address"], None, None + ) for ip, ser in self.ip_sn.items(): - if ip == attach['ip_address']: + if ip == attach["ip_address"]: serial = ser self.serial = ser if not serial: - self.module.fail_json(msg='Fabric: {} does not have the switch: {}' - .format(self.fabric, attach['ip_address'])) + self.module.fail_json( + msg="Fabric: {0} does not have the switch: {1}".format( + 
self.fabric, attach["ip_address"] + ) + ) - role = self.inventory_data[attach['ip_address']].get('switchRole') + role = self.inventory_data[attach["ip_address"]].get("switchRole") self.role = role - if role.lower() == 'spine' or role.lower() == 'super spine': - msg = 'VRFs cannot be attached to switch {} with role {}'.format(attach['ip_address'], role) + if role.lower() == "spine" or role.lower() == "super spine": + msg = "VRFs cannot be attached to switch {0} with role {1}".format( + attach["ip_address"], role + ) self.module.fail_json(msg=msg) ext_values = {} - if attach['vrf_lite']: - '''Before apply the vrf_lite config, need double check if the swtich role is started wth Border''' - r = re.search(r'\bborder\b', role.lower()) + if attach["vrf_lite"]: + """Before apply the vrf_lite config, need double check if the swtich role is started wth Border""" + r = re.search(r"\bborder\b", role.lower()) if not r: - msg = 'VRF LITE cannot be attached to switch {} with role {}'.format(attach['ip_address'], role) + msg = "VRF LITE cannot be attached to switch {0} with role {1}".format( + attach["ip_address"], role + ) self.module.fail_json(msg=msg) - at_lite = attach['vrf_lite'] + at_lite = attach["vrf_lite"] for a_l in at_lite: - if a_l['interface'] and a_l['dot1q'] and a_l['ipv4_addr'] and a_l['neighbor_ipv4'] and a_l['ipv6_addr'] \ - and a_l['neighbor_ipv6'] and a_l['peer_vrf']: - - ''' if all the elements are provided by the user in the playbook fill the extension values''' + if ( + a_l["interface"] + and a_l["dot1q"] + and a_l["ipv4_addr"] + and a_l["neighbor_ipv4"] + and a_l["ipv6_addr"] + and a_l["neighbor_ipv6"] + and a_l["peer_vrf"] + ): + + """if all the elements are provided by the user in the playbook fill the extension values""" vrflite_con = {} - vrflite_con['VRF_LITE_CONN'] = [] - vrflite_con['VRF_LITE_CONN'].append({}) - - vrflite_con['VRF_LITE_CONN'][0]['IF_NAME'] = a_l['interface'] - vrflite_con['VRF_LITE_CONN'][0]['DOT1Q_ID'] = str(a_l['dot1q']) - 
vrflite_con['VRF_LITE_CONN'][0]['IP_MASK'] = a_l['ipv4_addr'] - vrflite_con['VRF_LITE_CONN'][0]['NEIGHBOR_IP'] = a_l['neighbor_ipv4'] - vrflite_con['VRF_LITE_CONN'][0]['NEIGHBOR_ASN'] = '65535' - vrflite_con['VRF_LITE_CONN'][0]['IPV6_MASK'] = a_l['ipv6_addr'] - vrflite_con['VRF_LITE_CONN'][0]['IPV6_NEIGHBOR'] = a_l['neighbor_ipv6'] - vrflite_con['VRF_LITE_CONN'][0]['AUTO_VRF_LITE_FLAG'] = 'false' - vrflite_con['VRF_LITE_CONN'][0]['PEER_VRF_NAME'] = a_l['peer_vrf'] - vrflite_con['VRF_LITE_CONN'][0]['VRF_LITE_JYTHON_TEMPLATE'] = 'Ext_VRF_Lite_Jython' - ext_values['VRF_LITE_CONN'] = json.dumps(vrflite_con) + vrflite_con["VRF_LITE_CONN"] = [] + vrflite_con["VRF_LITE_CONN"].append({}) + + vrflite_con["VRF_LITE_CONN"][0]["IF_NAME"] = a_l["interface"] + vrflite_con["VRF_LITE_CONN"][0]["DOT1Q_ID"] = str(a_l["dot1q"]) + vrflite_con["VRF_LITE_CONN"][0]["IP_MASK"] = a_l["ipv4_addr"] + vrflite_con["VRF_LITE_CONN"][0]["NEIGHBOR_IP"] = a_l[ + "neighbor_ipv4" + ] + vrflite_con["VRF_LITE_CONN"][0]["NEIGHBOR_ASN"] = "65535" + vrflite_con["VRF_LITE_CONN"][0]["IPV6_MASK"] = a_l["ipv6_addr"] + vrflite_con["VRF_LITE_CONN"][0]["IPV6_NEIGHBOR"] = a_l[ + "neighbor_ipv6" + ] + vrflite_con["VRF_LITE_CONN"][0]["AUTO_VRF_LITE_FLAG"] = "false" + vrflite_con["VRF_LITE_CONN"][0]["PEER_VRF_NAME"] = a_l["peer_vrf"] + vrflite_con["VRF_LITE_CONN"][0][ + "VRF_LITE_JYTHON_TEMPLATE" + ] = "Ext_VRF_Lite_Jython" + ext_values["VRF_LITE_CONN"] = json.dumps(vrflite_con) ms_con = {} - ms_con['MULTISITE_CONN'] = [] - ext_values['MULTISITE_CONN'] = json.dumps(ms_con) + ms_con["MULTISITE_CONN"] = [] + ext_values["MULTISITE_CONN"] = json.dumps(ms_con) self.vrflitevalues = ext_values self.vrf_ext = True - attach.update({'fabric': self.fabric}) - attach.update({'vrfName': vrf_name}) - attach.update({'vlan': vlanId}) - attach.update({'deployment': deploy}) - attach.update({'isAttached': deploy}) - attach.update({'serialNumber': serial}) + attach.update({"fabric": self.fabric}) + attach.update({"vrfName": vrf_name}) 
+ attach.update({"vlan": vlanId}) + attach.update({"deployment": deploy}) + attach.update({"isAttached": deploy}) + attach.update({"serialNumber": serial}) if self.vrf_ext: - attach.update({'extensionValues': json.dumps(ext_values).replace(' ', '')}) - attach.update({'instanceValues': "{\"loopbackId\":\"\",\"loopbackIpAddress\":\"\",\"loopbackIpV6Address\":\"\"}"}) - del attach['vrf_lite'] + attach.update({"extensionValues": json.dumps(ext_values).replace(" ", "")}) + attach.update( + { + "instanceValues": '{"loopbackId":"","loopbackIpAddress":"","loopbackIpV6Address":""}' + } + ) + del attach["vrf_lite"] else: - attach.update({'extensionValues': ""}) - attach.update({'instanceValues': ""}) - attach.update({'freeformConfig': ""}) - if 'deploy' in attach: - del attach['deploy'] - del attach['ip_address'] + attach.update({"extensionValues": ""}) + attach.update({"instanceValues": ""}) + attach.update({"freeformConfig": ""}) + if "deploy" in attach: + del attach["deploy"] + del attach["ip_address"] return attach @@ -571,73 +639,88 @@ def diff_for_create(self, want, have): create = {} - json_to_dict_want = json.loads(want['vrfTemplateConfig']) - json_to_dict_have = json.loads(have['vrfTemplateConfig']) + json_to_dict_want = json.loads(want["vrfTemplateConfig"]) + json_to_dict_have = json.loads(have["vrfTemplateConfig"]) - vlanId_want = str(json_to_dict_want.get('vlanId', "")) - vlanId_have = json_to_dict_have.get('vlanId', "") + vlanId_want = str(json_to_dict_want.get("vlanId", "")) + vlanId_have = json_to_dict_have.get("vlanId", "") if vlanId_want != "0": - if want['vrfId'] is not None and have['vrfId'] != want['vrfId']: - self.module.fail_json(msg="vrf_id for vrf:{} cant be updated to a different value".format(want['vrfName'])) - elif have['serviceVrfTemplate'] != want['serviceVrfTemplate'] or \ - have['vrfTemplate'] != want['vrfTemplate'] or \ - have['vrfExtensionTemplate'] != want['vrfExtensionTemplate'] or \ - vlanId_have != vlanId_want: - - if want['vrfId'] is 
None: + if want["vrfId"] is not None and have["vrfId"] != want["vrfId"]: + self.module.fail_json( + msg="vrf_id for vrf:{0} cant be updated to a different value".format( + want["vrfName"] + ) + ) + elif ( + have["serviceVrfTemplate"] != want["serviceVrfTemplate"] + or have["vrfTemplate"] != want["vrfTemplate"] + or have["vrfExtensionTemplate"] != want["vrfExtensionTemplate"] + or vlanId_have != vlanId_want + ): + + if want["vrfId"] is None: # The vrf updates with missing vrfId will have to use existing # vrfId from the instance of the same vrf on DCNM. - want['vrfId'] = have['vrfId'] + want["vrfId"] = have["vrfId"] create = want else: pass else: - if want['vrfId'] is not None and have['vrfId'] != want['vrfId']: + if want["vrfId"] is not None and have["vrfId"] != want["vrfId"]: self.module.fail_json( - msg="vrf_id for vrf:{} cant be updated to a different value".format(want['vrfName'])) - elif have['serviceVrfTemplate'] != want['serviceVrfTemplate'] or \ - have['vrfTemplate'] != want['vrfTemplate'] or \ - have['vrfExtensionTemplate'] != want['vrfExtensionTemplate']: - - if want['vrfId'] is None: + msg="vrf_id for vrf:{0} cant be updated to a different value".format( + want["vrfName"] + ) + ) + elif ( + have["serviceVrfTemplate"] != want["serviceVrfTemplate"] + or have["vrfTemplate"] != want["vrfTemplate"] + or have["vrfExtensionTemplate"] != want["vrfExtensionTemplate"] + ): + + if want["vrfId"] is None: # The vrf updates with missing vrfId will have to use existing # vrfId from the instance of the same vrf on DCNM. 
- want['vrfId'] = have['vrfId'] + want["vrfId"] = have["vrfId"] create = want else: pass return create - def update_create_params(self, vrf, vlanId=''): + def update_create_params(self, vrf, vlanId=""): if not vrf: return vrf - v_template = vrf.get('vrf_template', 'Default_VRF_Universal') - ve_template = vrf.get('vrf_extension_template', 'Default_VRF_Extension_Universal') + v_template = vrf.get("vrf_template", "Default_VRF_Universal") + ve_template = vrf.get( + "vrf_extension_template", "Default_VRF_Extension_Universal" + ) src = None - s_v_template = vrf.get('service_vrf_template', None) + s_v_template = vrf.get("service_vrf_template", None) vrf_upd = { - 'fabric': self.fabric, - 'vrfName': vrf['vrf_name'], - 'vrfTemplate': v_template, - 'vrfExtensionTemplate': ve_template, - 'vrfId': vrf.get('vrf_id', None), # vrf_id will be auto generated in get_diff_merge() - 'serviceVrfTemplate': s_v_template, - 'source': src + "fabric": self.fabric, + "vrfName": vrf["vrf_name"], + "vrfTemplate": v_template, + "vrfExtensionTemplate": ve_template, + "vrfId": vrf.get( + "vrf_id", None + ), # vrf_id will be auto generated in get_diff_merge() + "serviceVrfTemplate": s_v_template, + "source": src, } template_conf = { - 'vrfSegmentId': vrf.get('vrf_id', None), - 'vrfName': vrf['vrf_name'], - 'vlanId': vlanId + "vrfSegmentId": vrf.get("vrf_id", None), + "vrfName": vrf["vrf_name"], + "vlanId": vlanId, } - vrf_upd.update({'vrfTemplateConfig': json.dumps(template_conf)}) + vrf_upd.update({"vrfTemplateConfig": json.dumps(template_conf)}) return vrf_upd @@ -646,138 +729,174 @@ def get_have(self): have_create = [] have_deploy = {} - curr_vrfs = '' + curr_vrfs = "" - method = 'GET' + method = "GET" path = self.paths["GET_VRF"].format(self.fabric) vrf_objects = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(vrf_objects, 'query_dcnm') + missing_fabric, not_ok = self.handle_response(vrf_objects, "query_dcnm") if missing_fabric or not_ok: - msg1 = "Fabric 
{} not present on DCNM".format(self.fabric) - msg2 = "Unable to find vrfs under fabric: {}".format(self.fabric) + msg1 = "Fabric {0} not present on DCNM".format(self.fabric) + msg2 = "Unable to find vrfs under fabric: {0}".format(self.fabric) self.module.fail_json(msg=msg1 if missing_fabric else msg2) - if not vrf_objects.get('DATA'): + if not vrf_objects.get("DATA"): return - for vrf in vrf_objects['DATA']: - curr_vrfs += vrf['vrfName'] + ',' + for vrf in vrf_objects["DATA"]: + curr_vrfs += vrf["vrfName"] + "," - vrf_attach_objects = dcnm_get_url(self.module, self.fabric, self.paths["GET_VRF_ATTACH"], curr_vrfs[:-1], "vrfs") + vrf_attach_objects = dcnm_get_url( + self.module, + self.fabric, + self.paths["GET_VRF_ATTACH"], + curr_vrfs[:-1], + "vrfs", + ) - if not vrf_attach_objects['DATA']: + if not vrf_attach_objects["DATA"]: return - for vrf in vrf_objects['DATA']: - json_to_dict = json.loads(vrf['vrfTemplateConfig']) + for vrf in vrf_objects["DATA"]: + json_to_dict = json.loads(vrf["vrfTemplateConfig"]) t_conf = { - 'vrfSegmentId': vrf['vrfId'], - 'vrfName': vrf['vrfName'], - 'vlanId': json_to_dict.get('vlanId', 0) + "vrfSegmentId": vrf["vrfId"], + "vrfName": vrf["vrfName"], + "vlanId": json_to_dict.get("vlanId", 0), } - vrf.update({'vrfTemplateConfig': json.dumps(t_conf)}) - del vrf['vrfStatus'] + vrf.update({"vrfTemplateConfig": json.dumps(t_conf)}) + del vrf["vrfStatus"] have_create.append(vrf) - upd_vrfs = '' + upd_vrfs = "" - for vrf_attach in vrf_attach_objects['DATA']: - if not vrf_attach.get('lanAttachList'): + for vrf_attach in vrf_attach_objects["DATA"]: + if not vrf_attach.get("lanAttachList"): continue - attach_list = vrf_attach['lanAttachList'] - dep_vrf = '' + attach_list = vrf_attach["lanAttachList"] + dep_vrf = "" for attach in attach_list: - attach_state = False if attach['lanAttachState'] == "NA" else True - deploy = attach['isLanAttached'] - if bool(deploy) and (attach['lanAttachState'] == "OUT-OF-SYNC" or - attach['lanAttachState'] == 
"PENDING"): + attach_state = False if attach["lanAttachState"] == "NA" else True + deploy = attach["isLanAttached"] + if bool(deploy) and ( + attach["lanAttachState"] == "OUT-OF-SYNC" + or attach["lanAttachState"] == "PENDING" + ): deploy = False if bool(deploy): - dep_vrf = attach['vrfName'] + dep_vrf = attach["vrfName"] - sn = attach['switchSerialNo'] - vlan = attach['vlanId'] + sn = attach["switchSerialNo"] + vlan = attach["vlanId"] # The deletes and updates below are done to update the incoming dictionary format to # match to what the outgoing payload requirements mandate. # Ex: 'vlanId' in the attach section of incoming payload needs to be changed to 'vlan' # on the attach section of outgoing payload. - del attach['vlanId'] - del attach['switchSerialNo'] - del attach['switchName'] - del attach['switchRole'] - del attach['ipAddress'] - del attach['lanAttachState'] - del attach['isLanAttached'] - del attach['vrfId'] - del attach['fabricName'] - - attach.update({'fabric': self.fabric}) - attach.update({'vlan': vlan}) - attach.update({'serialNumber': sn}) - attach.update({'deployment': deploy}) - attach.update({'extensionValues': ""}) - attach.update({'instanceValues': ""}) - attach.update({'freeformConfig': ""}) - attach.update({'isAttached': attach_state}) - - ''' Get the VRF LITE extension template and update it to the attach['extensionvalues']''' - - '''Get the IP/Interface that is connected to edge router can be get from below query''' - method = 'GET' - path = self.paths["GET_VRF_SWITCH"].format(self.fabric, attach['vrfName'], sn) + del attach["vlanId"] + del attach["switchSerialNo"] + del attach["switchName"] + del attach["switchRole"] + del attach["ipAddress"] + del attach["lanAttachState"] + del attach["isLanAttached"] + del attach["vrfId"] + del attach["fabricName"] + + attach.update({"fabric": self.fabric}) + attach.update({"vlan": vlan}) + attach.update({"serialNumber": sn}) + attach.update({"deployment": deploy}) + attach.update({"extensionValues": 
""}) + attach.update({"instanceValues": ""}) + attach.update({"freeformConfig": ""}) + attach.update({"isAttached": attach_state}) + + """ Get the VRF LITE extension template and update it to the attach['extensionvalues']""" + + """Get the IP/Interface that is connected to edge router can be get from below query""" + method = "GET" + path = self.paths["GET_VRF_SWITCH"].format( + self.fabric, attach["vrfName"], sn + ) lite_objects = dcnm_send(self.module, method, path) - if not lite_objects.get('DATA'): + if not lite_objects.get("DATA"): return - for sdl in lite_objects['DATA']: - for epv in sdl['switchDetailsList']: - if epv.get('extensionValues'): - ext_values = epv['extensionValues'] + for sdl in lite_objects["DATA"]: + for epv in sdl["switchDetailsList"]: + if epv.get("extensionValues"): + ext_values = epv["extensionValues"] ext_values = ast.literal_eval(ext_values) - if ext_values.get('VRF_LITE_CONN') is not None: - ext_values = ast.literal_eval(ext_values['VRF_LITE_CONN']) - for ev in ext_values['VRF_LITE_CONN']: + if ext_values.get("VRF_LITE_CONN") is not None: + ext_values = ast.literal_eval( + ext_values["VRF_LITE_CONN"] + ) + for ev in ext_values["VRF_LITE_CONN"]: extension_values = {} vrflite_con = {} - vrflite_con['VRF_LITE_CONN'] = [] - vrflite_con['VRF_LITE_CONN'].append({}) - vrflite_con['VRF_LITE_CONN'][0]['IF_NAME'] = ev['IF_NAME'] - vrflite_con['VRF_LITE_CONN'][0]['DOT1Q_ID'] = str(ev['DOT1Q_ID']) - vrflite_con['VRF_LITE_CONN'][0]['IP_MASK'] = ev['IP_MASK'] - vrflite_con['VRF_LITE_CONN'][0]['NEIGHBOR_IP'] = ev['NEIGHBOR_IP'] - vrflite_con['VRF_LITE_CONN'][0]['IPV6_MASK'] = ev['IPV6_MASK'] - vrflite_con['VRF_LITE_CONN'][0]['IPV6_NEIGHBOR'] = ev['IPV6_NEIGHBOR'] - - vrflite_con['VRF_LITE_CONN'][0]['AUTO_VRF_LITE_FLAG'] = 'false' - vrflite_con['VRF_LITE_CONN'][0]['PEER_VRF_NAME'] = attach['vrfName'] - vrflite_con['VRF_LITE_CONN'][0]['VRF_LITE_JYTHON_TEMPLATE'] = 'Ext_VRF_Lite_Jython' - extension_values['VRF_LITE_CONN'] = json.dumps(vrflite_con) + 
vrflite_con["VRF_LITE_CONN"] = [] + vrflite_con["VRF_LITE_CONN"].append({}) + vrflite_con["VRF_LITE_CONN"][0]["IF_NAME"] = ev[ + "IF_NAME" + ] + vrflite_con["VRF_LITE_CONN"][0]["DOT1Q_ID"] = str( + ev["DOT1Q_ID"] + ) + vrflite_con["VRF_LITE_CONN"][0]["IP_MASK"] = ev[ + "IP_MASK" + ] + vrflite_con["VRF_LITE_CONN"][0]["NEIGHBOR_IP"] = ev[ + "NEIGHBOR_IP" + ] + vrflite_con["VRF_LITE_CONN"][0]["IPV6_MASK"] = ev[ + "IPV6_MASK" + ] + vrflite_con["VRF_LITE_CONN"][0][ + "IPV6_NEIGHBOR" + ] = ev["IPV6_NEIGHBOR"] + + vrflite_con["VRF_LITE_CONN"][0][ + "AUTO_VRF_LITE_FLAG" + ] = "false" + vrflite_con["VRF_LITE_CONN"][0][ + "PEER_VRF_NAME" + ] = attach["vrfName"] + vrflite_con["VRF_LITE_CONN"][0][ + "VRF_LITE_JYTHON_TEMPLATE" + ] = "Ext_VRF_Lite_Jython" + extension_values["VRF_LITE_CONN"] = json.dumps( + vrflite_con + ) ms_con = {} - ms_con['MULTISITE_CONN'] = [] - extension_values['MULTISITE_CONN'] = json.dumps(ms_con) - e_values = json.dumps(extension_values).replace(' ', '') + ms_con["MULTISITE_CONN"] = [] + extension_values["MULTISITE_CONN"] = json.dumps( + ms_con + ) + e_values = json.dumps(extension_values).replace( + " ", "" + ) - attach.update({'extensionValues': e_values}) + attach.update({"extensionValues": e_values}) if dep_vrf: upd_vrfs += dep_vrf + "," - have_attach = vrf_attach_objects['DATA'] + have_attach = vrf_attach_objects["DATA"] if upd_vrfs: - have_deploy.update({'vrfNames': upd_vrfs[:-1]}) + have_deploy.update({"vrfNames": upd_vrfs[:-1]}) self.have_create = have_create self.have_attach = have_attach @@ -798,32 +917,31 @@ def get_want(self): vrf_attach = {} vrfs = [] - vrf_deploy = vrf.get('deploy', True) - if vrf.get('vlan_id'): - vlanId = vrf.get('vlan_id') + vrf_deploy = vrf.get("deploy", True) + if vrf.get("vlan_id"): + vlanId = vrf.get("vlan_id") else: vlanId = 0 want_create.append(self.update_create_params(vrf, vlanId)) - if not vrf.get('attach'): + if not vrf.get("attach"): continue - for attach in vrf['attach']: - deploy = vrf_deploy if "deploy" 
not in attach else attach['deploy'] - vrfs.append(self.update_attach_params(attach, - vrf['vrf_name'], - deploy, - vlanId)) + for attach in vrf["attach"]: + deploy = vrf_deploy if "deploy" not in attach else attach["deploy"] + vrfs.append( + self.update_attach_params(attach, vrf["vrf_name"], deploy, vlanId) + ) if vrfs: - vrf_attach.update({'vrfName': vrf['vrf_name']}) - vrf_attach.update({'lanAttachList': vrfs}) + vrf_attach.update({"vrfName": vrf["vrf_name"]}) + vrf_attach.update({"lanAttachList": vrfs}) want_attach.append(vrf_attach) - all_vrfs += vrf['vrf_name'] + "," + all_vrfs += vrf["vrf_name"] + "," if all_vrfs: - want_deploy.update({'vrfNames': all_vrfs[:-1]}) + want_deploy.update({"vrfNames": all_vrfs[:-1]}) self.want_create = want_create self.want_attach = want_attach @@ -835,51 +953,65 @@ def get_diff_delete(self): diff_undeploy = {} diff_delete = {} - all_vrfs = '' + all_vrfs = "" if self.config: for want_c in self.want_create: - if not next((have_c for have_c in self.have_create if have_c['vrfName'] == want_c['vrfName']), None): + if not next( + ( + have_c + for have_c in self.have_create + if have_c["vrfName"] == want_c["vrfName"] + ), + None, + ): continue - diff_delete.update({want_c['vrfName']: 'DEPLOYED'}) + diff_delete.update({want_c["vrfName"]: "DEPLOYED"}) - have_a = next((attach for attach in self.have_attach if attach['vrfName'] == want_c['vrfName']), None) + have_a = next( + ( + attach + for attach in self.have_attach + if attach["vrfName"] == want_c["vrfName"] + ), + None, + ) if not have_a: continue to_del = [] - atch_h = have_a['lanAttachList'] + atch_h = have_a["lanAttachList"] for a_h in atch_h: - if a_h['isAttached']: - del a_h['isAttached'] - a_h.update({'deployment': False}) + if a_h["isAttached"]: + del a_h["isAttached"] + a_h.update({"deployment": False}) to_del.append(a_h) if to_del: - have_a.update({'lanAttachList': to_del}) + have_a.update({"lanAttachList": to_del}) diff_detach.append(have_a) - all_vrfs += have_a['vrfName'] + 
"," + all_vrfs += have_a["vrfName"] + "," if all_vrfs: - diff_undeploy.update({'vrfNames': all_vrfs[:-1]}) + diff_undeploy.update({"vrfNames": all_vrfs[:-1]}) else: for have_a in self.have_attach: to_del = [] - atch_h = have_a['lanAttachList'] + atch_h = have_a["lanAttachList"] for a_h in atch_h: - if a_h['isAttached']: - del a_h['isAttached'] - a_h.update({'deployment': False}) + if a_h["isAttached"]: + del a_h["isAttached"] + a_h.update({"deployment": False}) to_del.append(a_h) if to_del: - have_a.update({'lanAttachList': to_del}) + have_a.update({"lanAttachList": to_del}) diff_detach.append(have_a) - all_vrfs += have_a['vrfName'] + "," + all_vrfs += have_a["vrfName"] + "," - diff_delete.update({have_a['vrfName']: 'DEPLOYED'}) + diff_delete.update({have_a["vrfName"]: "DEPLOYED"}) if all_vrfs: - diff_undeploy.update({'vrfNames': all_vrfs[:-1]}) + diff_undeploy.update({"vrfNames": all_vrfs[:-1]}) self.diff_detach = diff_detach self.diff_undeploy = diff_undeploy @@ -887,7 +1019,7 @@ def get_diff_delete(self): def get_diff_override(self): - all_vrfs = '' + all_vrfs = "" diff_delete = {} self.get_diff_replace() @@ -899,26 +1031,33 @@ def get_diff_override(self): diff_undeploy = self.diff_undeploy for have_a in self.have_attach: - found = next((vrf for vrf in self.want_create if vrf['vrfName'] == have_a['vrfName']), None) + found = next( + ( + vrf + for vrf in self.want_create + if vrf["vrfName"] == have_a["vrfName"] + ), + None, + ) to_del = [] if not found: - atch_h = have_a['lanAttachList'] + atch_h = have_a["lanAttachList"] for a_h in atch_h: - if a_h['isAttached']: - del a_h['isAttached'] - a_h.update({'deployment': False}) + if a_h["isAttached"]: + del a_h["isAttached"] + a_h.update({"deployment": False}) to_del.append(a_h) if to_del: - have_a.update({'lanAttachList': to_del}) + have_a.update({"lanAttachList": to_del}) diff_detach.append(have_a) - all_vrfs += have_a['vrfName'] + "," + all_vrfs += have_a["vrfName"] + "," - diff_delete.update({have_a['vrfName']: 
'DEPLOYED'}) + diff_delete.update({have_a["vrfName"]: "DEPLOYED"}) if all_vrfs: - diff_undeploy.update({'vrfNames': all_vrfs[:-1]}) + diff_undeploy.update({"vrfNames": all_vrfs[:-1]}) self.diff_create = diff_create self.diff_attach = diff_attach @@ -929,7 +1068,7 @@ def get_diff_override(self): def get_diff_replace(self): - all_vrfs = '' + all_vrfs = "" self.get_diff_merge() diff_create = self.diff_create @@ -940,54 +1079,61 @@ def get_diff_replace(self): r_vrf_list = [] h_in_w = False for want_a in self.want_attach: - if have_a['vrfName'] == want_a['vrfName']: + if have_a["vrfName"] == want_a["vrfName"]: h_in_w = True - atch_h = have_a['lanAttachList'] - atch_w = want_a.get('lanAttachList') + atch_h = have_a["lanAttachList"] + atch_w = want_a.get("lanAttachList") for a_h in atch_h: - if not a_h['isAttached']: + if not a_h["isAttached"]: continue a_match = False if atch_w: for a_w in atch_w: - if a_h['serialNumber'] == a_w['serialNumber']: + if a_h["serialNumber"] == a_w["serialNumber"]: # Have is already in diff, no need to continue looking for it. 
a_match = True break if not a_match: - del a_h['isAttached'] - a_h.update({'deployment': False}) + del a_h["isAttached"] + a_h.update({"deployment": False}) r_vrf_list.append(a_h) break if not h_in_w: - found = next((vrf for vrf in self.want_create if vrf['vrfName'] == have_a['vrfName']), None) + found = next( + ( + vrf + for vrf in self.want_create + if vrf["vrfName"] == have_a["vrfName"] + ), + None, + ) if found: - atch_h = have_a['lanAttachList'] + atch_h = have_a["lanAttachList"] for a_h in atch_h: - if not bool(a_h['isAttached']): + if not bool(a_h["isAttached"]): continue - del a_h['isAttached'] - a_h.update({'deployment': False}) + del a_h["isAttached"] + a_h.update({"deployment": False}) r_vrf_list.append(a_h) if r_vrf_list: in_diff = False for d_attach in self.diff_attach: - if have_a['vrfName'] == d_attach['vrfName']: + if have_a["vrfName"] == d_attach["vrfName"]: in_diff = True - d_attach['lanAttachList'].extend(r_vrf_list) + d_attach["lanAttachList"].extend(r_vrf_list) break if not in_diff: r_vrf_dict = { - 'vrfName': have_a['vrfName'], - 'lanAttachList': r_vrf_list + "vrfName": have_a["vrfName"], + "lanAttachList": r_vrf_list, } diff_attach.append(r_vrf_dict) - all_vrfs += have_a['vrfName'] + "," + all_vrfs += have_a["vrfName"] + "," if not all_vrfs: self.diff_create = diff_create @@ -996,10 +1142,10 @@ def get_diff_replace(self): return if not self.diff_deploy: - diff_deploy.update({'vrfNames': all_vrfs[:-1]}) + diff_deploy.update({"vrfNames": all_vrfs[:-1]}) else: - vrfs = self.diff_deploy['vrfNames'] + "," + all_vrfs[:-1] - diff_deploy.update({'vrfNames': vrfs}) + vrfs = self.diff_deploy["vrfNames"] + "," + all_vrfs[:-1] + diff_deploy.update({"vrfNames": vrfs}) self.diff_create = diff_create self.diff_attach = diff_attach @@ -1026,61 +1172,73 @@ def get_diff_merge(self): for want_c in self.want_create: vrf_found = False for have_c in self.have_create: - if want_c['vrfName'] == have_c['vrfName']: + if want_c["vrfName"] == have_c["vrfName"]: 
vrf_found = True diff = self.diff_for_create(want_c, have_c) if diff: diff_create_update.append(diff) break if not vrf_found: - vrf_id = want_c.get('vrfId', None) + vrf_id = want_c.get("vrfId", None) if vrf_id is None: # vrfId is not provided by user. # Need to query DCNM to fetch next available vrfId and use it here. - method = 'POST' + method = "POST" attempt = 0 - while True and attempt < 10: + while attempt < 10: attempt += 1 path = self.paths["GET_VRF_ID"].format(self.fabric) if self.dcnm_version > 11: - vrf_id_obj = dcnm_send(self.module, 'GET', path) + vrf_id_obj = dcnm_send(self.module, "GET", path) else: vrf_id_obj = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(vrf_id_obj, 'query_dcnm') + missing_fabric, not_ok = self.handle_response( + vrf_id_obj, "query_dcnm" + ) if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Unable to generate vrfId for vrf: {} " \ - "under fabric: {}".format(want_c['vrfName'], self.fabric) + msg1 = "Fabric {0} not present on DCNM".format(self.fabric) + msg2 = ( + "Unable to generate vrfId for vrf: {0} " + "under fabric: {1}".format( + want_c["vrfName"], self.fabric + ) + ) self.module.fail_json(msg=msg1 if missing_fabric else msg2) - if not vrf_id_obj['DATA']: + if not vrf_id_obj["DATA"]: continue if self.dcnm_version == 11: - vrf_id = vrf_id_obj['DATA'].get('partitionSegmentId') + vrf_id = vrf_id_obj["DATA"].get("partitionSegmentId") elif self.dcnm_version >= 12: - vrf_id = vrf_id_obj['DATA'].get('l3vni') + vrf_id = vrf_id_obj["DATA"].get("l3vni") else: - msg = "Unsupported DCNM version: version {}".format(self.dcnm_version) + msg = "Unsupported DCNM version: version {0}".format( + self.dcnm_version + ) self.module.fail_json(msg) if vrf_id != prev_vrf_id_fetched: - want_c.update({'vrfId': vrf_id}) + want_c.update({"vrfId": vrf_id}) template_conf = { - 'vrfSegmentId': vrf_id, - 'vrfName': want_c['vrfName'] + "vrfSegmentId": vrf_id, + "vrfName": 
want_c["vrfName"], } - want_c.update({'vrfTemplateConfig': json.dumps(template_conf)}) + want_c.update( + {"vrfTemplateConfig": json.dumps(template_conf)} + ) prev_vrf_id_fetched = vrf_id break if not vrf_id: - self.module.fail_json(msg="Unable to generate vrfId for vrf: {} " - "under fabric: {}".format(want_c['vrfName'], self.fabric)) + self.module.fail_json( + msg="Unable to generate vrfId for vrf: {0} " + "under fabric: {1}".format(want_c["vrfName"], self.fabric) + ) create_path = self.paths["GET_VRF"].format(self.fabric) @@ -1089,10 +1247,11 @@ def get_diff_merge(self): if self.module.check_mode: continue - resp = dcnm_send(self.module, method, create_path, - json.dumps(want_c)) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "create") + resp = dcnm_send( + self.module, method, create_path, json.dumps(want_c) + ) + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "create") if fail: self.failure(resp) @@ -1100,43 +1259,45 @@ def get_diff_merge(self): diff_create.append(want_c) for want_a in self.want_attach: - dep_vrf = '' + dep_vrf = "" attach_found = False for have_a in self.have_attach: - if want_a['vrfName'] == have_a['vrfName']: + if want_a["vrfName"] == have_a["vrfName"]: attach_found = True - diff, vrf = self.diff_for_attach_deploy(want_a['lanAttachList'], have_a['lanAttachList']) + diff, vrf = self.diff_for_attach_deploy( + want_a["lanAttachList"], have_a["lanAttachList"] + ) if diff: base = want_a.copy() - del base['lanAttachList'] - base.update({'lanAttachList': diff}) + del base["lanAttachList"] + base.update({"lanAttachList": diff}) diff_attach.append(base) - dep_vrf = want_a['vrfName'] + dep_vrf = want_a["vrfName"] else: if vrf: - dep_vrf = want_a['vrfName'] + dep_vrf = want_a["vrfName"] - if not attach_found and want_a.get('lanAttachList'): + if not attach_found and want_a.get("lanAttachList"): atch_list = [] - for attach in want_a['lanAttachList']: - 
if attach.get('isAttached'): - del attach['isAttached'] - if bool(attach['deployment']): + for attach in want_a["lanAttachList"]: + if attach.get("isAttached"): + del attach["isAttached"] + if bool(attach["deployment"]): atch_list.append(attach) if atch_list: base = want_a.copy() - del base['lanAttachList'] - base.update({'lanAttachList': atch_list}) + del base["lanAttachList"] + base.update({"lanAttachList": atch_list}) diff_attach.append(base) - dep_vrf = want_a['vrfName'] + dep_vrf = want_a["vrfName"] if dep_vrf: all_vrfs += dep_vrf + "," if all_vrfs: - diff_deploy.update({'vrfNames': all_vrfs[:-1]}) + diff_deploy.update({"vrfNames": all_vrfs[:-1]}) if vrf_found and not attach_found: self.diff_create = [] @@ -1157,8 +1318,12 @@ def format_diff(self): diff_create_update = copy.deepcopy(self.diff_create_update) diff_attach = copy.deepcopy(self.diff_attach) diff_detach = copy.deepcopy(self.diff_detach) - diff_deploy = self.diff_deploy['vrfNames'].split(",") if self.diff_deploy else [] - diff_undeploy = self.diff_undeploy['vrfNames'].split(",") if self.diff_undeploy else [] + diff_deploy = ( + self.diff_deploy["vrfNames"].split(",") if self.diff_deploy else [] + ) + diff_undeploy = ( + self.diff_undeploy["vrfNames"].split(",") if self.diff_undeploy else [] + ) diff_create.extend(diff_create_quick) diff_create.extend(diff_create_update) @@ -1167,46 +1332,49 @@ def format_diff(self): for want_d in diff_create: - found_a = next((vrf for vrf in diff_attach if vrf['vrfName'] == want_d['vrfName']), None) + found_a = next( + (vrf for vrf in diff_attach if vrf["vrfName"] == want_d["vrfName"]), + None, + ) found_c = want_d - src = found_c['source'] - found_c.update({'vrf_name': found_c['vrfName']}) - found_c.update({'vrf_id': found_c['vrfId']}) - found_c.update({'vrf_template': found_c['vrfTemplate']}) - found_c.update({'vrf_extension_template': found_c['vrfExtensionTemplate']}) - del found_c['source'] - found_c.update({'source': src}) - 
found_c.update({'service_vrf_template': found_c['serviceVrfTemplate']}) - found_c.update({'attach': []}) - - del found_c['fabric'] - del found_c['vrfName'] - del found_c['vrfId'] - del found_c['vrfTemplate'] - del found_c['vrfExtensionTemplate'] - del found_c['serviceVrfTemplate'] - del found_c['vrfTemplateConfig'] + src = found_c["source"] + found_c.update({"vrf_name": found_c["vrfName"]}) + found_c.update({"vrf_id": found_c["vrfId"]}) + found_c.update({"vrf_template": found_c["vrfTemplate"]}) + found_c.update({"vrf_extension_template": found_c["vrfExtensionTemplate"]}) + del found_c["source"] + found_c.update({"source": src}) + found_c.update({"service_vrf_template": found_c["serviceVrfTemplate"]}) + found_c.update({"attach": []}) + + del found_c["fabric"] + del found_c["vrfName"] + del found_c["vrfId"] + del found_c["vrfTemplate"] + del found_c["vrfExtensionTemplate"] + del found_c["serviceVrfTemplate"] + del found_c["vrfTemplateConfig"] if diff_deploy: - diff_deploy.remove(found_c['vrf_name']) + diff_deploy.remove(found_c["vrf_name"]) if not found_a: diff.append(found_c) continue - attach = found_a['lanAttachList'] + attach = found_a["lanAttachList"] for a_w in attach: attach_d = {} for k, v in self.ip_sn.items(): - if v == a_w['serialNumber']: - attach_d.update({'ip_address': k}) + if v == a_w["serialNumber"]: + attach_d.update({"ip_address": k}) break - attach_d.update({'vlan_id': a_w['vlan']}) - attach_d.update({'deploy': a_w['deployment']}) - found_c['attach'].append(attach_d) + attach_d.update({"vlan_id": a_w["vlan"]}) + attach_d.update({"deploy": a_w["deployment"]}) + found_c["attach"].append(attach_d) diff.append(found_c) @@ -1215,135 +1383,156 @@ def format_diff(self): for vrf in diff_attach: new_attach_dict = {} new_attach_list = [] - attach = vrf['lanAttachList'] + attach = vrf["lanAttachList"] for a_w in attach: attach_d = {} for k, v in self.ip_sn.items(): - if v == a_w['serialNumber']: - attach_d.update({'ip_address': k}) + if v == 
a_w["serialNumber"]: + attach_d.update({"ip_address": k}) break - attach_d.update({'vlan_id': a_w['vlan']}) - attach_d.update({'deploy': a_w['deployment']}) + attach_d.update({"vlan_id": a_w["vlan"]}) + attach_d.update({"deploy": a_w["deployment"]}) new_attach_list.append(attach_d) if new_attach_list: - if diff_deploy and vrf['vrfName'] in diff_deploy: - diff_deploy.remove(vrf['vrfName']) - new_attach_dict.update({'attach': new_attach_list}) - new_attach_dict.update({'vrf_name': vrf['vrfName']}) + if diff_deploy and vrf["vrfName"] in diff_deploy: + diff_deploy.remove(vrf["vrfName"]) + new_attach_dict.update({"attach": new_attach_list}) + new_attach_dict.update({"vrf_name": vrf["vrfName"]}) diff.append(new_attach_dict) for vrf in diff_deploy: - new_deploy_dict = {'vrf_name': vrf} + new_deploy_dict = {"vrf_name": vrf} diff.append(new_deploy_dict) self.diff_input_format = diff def get_diff_query(self): - method = 'GET' + method = "GET" path = self.paths["GET_VRF"].format(self.fabric) vrf_objects = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(vrf_objects, 'query_dcnm') - - if vrf_objects.get('ERROR') == 'Not Found' and vrf_objects.get('RETURN_CODE') == 404: - self.module.fail_json(msg="Fabric {} not present on DCNM".format(self.fabric)) + missing_fabric, not_ok = self.handle_response(vrf_objects, "query_dcnm") + + if ( + vrf_objects.get("ERROR") == "Not Found" + and vrf_objects.get("RETURN_CODE") == 404 + ): + self.module.fail_json( + msg="Fabric {0} not present on DCNM".format(self.fabric) + ) return if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Unable to find VRFs under fabric: {}".format(self.fabric) + msg1 = "Fabric {0} not present on DCNM".format(self.fabric) + msg2 = "Unable to find VRFs under fabric: {0}".format(self.fabric) self.module.fail_json(msg=msg1 if missing_fabric else msg2) - if not vrf_objects['DATA']: + if not vrf_objects["DATA"]: return if self.config: 
query = [] for want_c in self.want_create: # Query the VRF - for vrf in vrf_objects['DATA']: + for vrf in vrf_objects["DATA"]: - if (want_c['vrfName'] == vrf['vrfName']): + if want_c["vrfName"] == vrf["vrfName"]: - item = {'parent': {}, 'attach': []} - item['parent'] = vrf + item = {"parent": {}, "attach": []} + item["parent"] = vrf # Query the Attachment for the found VRF - method = 'GET' - path = self.paths["GET_VRF_ATTACH"].format(self.fabric, vrf['vrfName']) + method = "GET" + path = self.paths["GET_VRF_ATTACH"].format( + self.fabric, vrf["vrfName"] + ) vrf_attach_objects = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(vrf_attach_objects, 'query_dcnm') + missing_fabric, not_ok = self.handle_response( + vrf_attach_objects, "query_dcnm" + ) if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Unable to find attachments for " \ - "vrfs: {} under fabric: {}".format(vrf['vrfName'], self.fabric) + msg1 = "Fabric {0} not present on DCNM".format(self.fabric) + msg2 = ( + "Unable to find attachments for " + "vrfs: {0} under fabric: {1}".format( + vrf["vrfName"], self.fabric + ) + ) self.module.fail_json(msg=msg1 if missing_fabric else msg2) return - if not vrf_attach_objects['DATA']: + if not vrf_attach_objects["DATA"]: return - for vrf_attach in vrf_attach_objects['DATA']: - if want_c['vrfName'] == vrf_attach['vrfName']: - if not vrf_attach.get('lanAttachList'): + for vrf_attach in vrf_attach_objects["DATA"]: + if want_c["vrfName"] == vrf_attach["vrfName"]: + if not vrf_attach.get("lanAttachList"): continue - attach_list = vrf_attach['lanAttachList'] + attach_list = vrf_attach["lanAttachList"] for attach in attach_list: - path = self.paths["GET_VRF_SWITCH"].format(self.fabric, attach['vrfName'], - attach['switchSerialNo']) + path = self.paths["GET_VRF_SWITCH"].format( + self.fabric, + attach["vrfName"], + attach["switchSerialNo"], + ) lite_objects = dcnm_send(self.module, method, 
path) - if not lite_objects.get('DATA'): + if not lite_objects.get("DATA"): return - item['attach'].append(lite_objects.get('DATA')[0]) + item["attach"].append(lite_objects.get("DATA")[0]) query.append(item) else: query = [] # Query the VRF - for vrf in vrf_objects['DATA']: - item = {'parent': {}, 'attach': []} - item['parent'] = vrf + for vrf in vrf_objects["DATA"]: + item = {"parent": {}, "attach": []} + item["parent"] = vrf # Query the Attachment for the found VRF - method = 'GET' - path = self.paths["GET_VRF_ATTACH"].format(self.fabric, vrf['vrfName']) + method = "GET" + path = self.paths["GET_VRF_ATTACH"].format(self.fabric, vrf["vrfName"]) vrf_attach_objects = dcnm_send(self.module, method, path) - missing_fabric, not_ok = self.handle_response(vrf_objects, 'query_dcnm') + missing_fabric, not_ok = self.handle_response(vrf_objects, "query_dcnm") if missing_fabric or not_ok: - msg1 = "Fabric {} not present on DCNM".format(self.fabric) - msg2 = "Unable to find attachments for " \ - "vrfs: {} under fabric: {}".format(vrf['vrfName'], self.fabric) + msg1 = "Fabric {0} not present on DCNM".format(self.fabric) + msg2 = ( + "Unable to find attachments for " + "vrfs: {0} under fabric: {1}".format( + vrf["vrfName"], self.fabric + ) + ) self.module.fail_json(msg=msg1 if missing_fabric else msg2) return - if not vrf_attach_objects['DATA']: + if not vrf_attach_objects["DATA"]: return - for vrf_attach in vrf_attach_objects['DATA']: - if not vrf_attach.get('lanAttachList'): + for vrf_attach in vrf_attach_objects["DATA"]: + if not vrf_attach.get("lanAttachList"): continue - attach_list = vrf_attach['lanAttachList'] + attach_list = vrf_attach["lanAttachList"] for attach in attach_list: - path = self.paths["GET_VRF_SWITCH"].format(self.fabric, attach['vrfName'], - attach['switchSerialNo']) + path = self.paths["GET_VRF_SWITCH"].format( + self.fabric, attach["vrfName"], attach["switchSerialNo"] + ) lite_objects = dcnm_send(self.module, method, path) - if not 
lite_objects.get('DATA'): + if not lite_objects.get("DATA"): return - item['attach'].append(lite_objects.get('DATA')[0]) + item["attach"].append(lite_objects.get("DATA")[0]) query.append(item) self.query = query @@ -1352,13 +1541,13 @@ def push_to_remote(self, is_rollback=False): path = self.paths["GET_VRF"].format(self.fabric) - method = 'PUT' + method = "PUT" if self.diff_create_update: for vrf in self.diff_create_update: - update_path = path + '/{}'.format(vrf['vrfName']) + update_path = path + "/{0}".format(vrf["vrfName"]) resp = dcnm_send(self.module, method, update_path, json.dumps(vrf)) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "create") + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "create") if fail: if is_rollback: self.failed_to_rollback = True @@ -1371,49 +1560,53 @@ def push_to_remote(self, is_rollback=False): # created. This is needed specially for state: overridden # - method = 'POST' + method = "POST" if self.diff_detach: - detach_path = path + '/attachments' + detach_path = path + "/attachments" # Update the fabric name to specific fabric to which the switches belong for multisite fabric. 
- if self.fabric_type == 'MFD': + if self.fabric_type == "MFD": for elem in self.diff_detach: - for node in elem['lanAttachList']: - node['fabric'] = self.sn_fab[node['serialNumber']] + for node in elem["lanAttachList"]: + node["fabric"] = self.sn_fab[node["serialNumber"]] - resp = dcnm_send(self.module, method, detach_path, json.dumps(self.diff_detach)) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "attach") + resp = dcnm_send( + self.module, method, detach_path, json.dumps(self.diff_detach) + ) + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "attach") if fail: if is_rollback: self.failed_to_rollback = True return self.failure(resp) - method = 'POST' + method = "POST" if self.diff_undeploy: - deploy_path = path + '/deployments' - resp = dcnm_send(self.module, method, deploy_path, json.dumps(self.diff_undeploy)) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "deploy") + deploy_path = path + "/deployments" + resp = dcnm_send( + self.module, method, deploy_path, json.dumps(self.diff_undeploy) + ) + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "deploy") if fail: if is_rollback: self.failed_to_rollback = True return self.failure(resp) - del_failure = '' + del_failure = "" if self.diff_delete and self.wait_for_vrf_del_ready(): - method = 'DELETE' + method = "DELETE" for vrf, state in self.diff_delete.items(): - if state == 'OUT-OF-SYNC': + if state == "OUT-OF-SYNC": del_failure += vrf + "," continue delete_path = path + "/" + vrf resp = dcnm_send(self.module, method, delete_path) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "delete") + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "delete") if fail: if is_rollback: self.failed_to_rollback = True @@ -1421,35 
+1614,41 @@ def push_to_remote(self, is_rollback=False): self.failure(resp) if del_failure: - self.result['response'].append('Deletion of vrfs {} has failed'.format(del_failure[:-1])) + self.result["response"].append( + "Deletion of vrfs {0} has failed".format(del_failure[:-1]) + ) self.module.fail_json(msg=self.result) - method = 'POST' + method = "POST" if self.diff_create: for vrf in self.diff_create: - json_to_dict = json.loads(vrf['vrfTemplateConfig']) - vlanId = json_to_dict.get('vlanId', "0") + json_to_dict = json.loads(vrf["vrfTemplateConfig"]) + vlanId = json_to_dict.get("vlanId", "0") if vlanId == 0: vlan_path = self.paths["GET_VLAN"].format(self.fabric) - vlan_data = dcnm_send(self.module, 'GET', vlan_path) + vlan_data = dcnm_send(self.module, "GET", vlan_path) - if vlan_data['RETURN_CODE'] != 200: - self.module.fail_json(msg='Failure getting autogenerated vlan_id {}'.format(vlan_data)) - vlanId = vlan_data['DATA'] + if vlan_data["RETURN_CODE"] != 200: + self.module.fail_json( + msg="Failure getting autogenerated vlan_id {0}".format( + vlan_data + ) + ) + vlanId = vlan_data["DATA"] t_conf = { - 'vrfSegmentId': json_to_dict.get('vrfId', ""), - 'vrfName': json_to_dict.get('vrfName', ""), - 'vlanId': vlanId + "vrfSegmentId": json_to_dict.get("vrfId", ""), + "vrfName": json_to_dict.get("vrfName", ""), + "vlanId": vlanId, } - vrf.update({'vrfTemplateConfig': json.dumps(t_conf)}) + vrf.update({"vrfTemplateConfig": json.dumps(t_conf)}) resp = dcnm_send(self.module, method, path, json.dumps(vrf)) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "create") + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "create") if fail: if is_rollback: self.failed_to_rollback = True @@ -1458,112 +1657,164 @@ def push_to_remote(self, is_rollback=False): if self.diff_attach: for d_a in self.diff_attach: - for v_a in d_a['lanAttachList']: - if v_a.get('vrf_lite'): - '''Before apply 
the vrf_lite config, need double check if the switch role is started wth Border''' - r = re.search(r'\bborder\b', self.role.lower()) + for v_a in d_a["lanAttachList"]: + if v_a.get("vrf_lite"): + """Before apply the vrf_lite config, need double check if the switch role is started wth Border""" + r = re.search(r"\bborder\b", self.role.lower()) if not r: - msg = 'VRF LITE cannot be attached to switch {} with role {}'.format(v_a['ip_address'], self.role) + msg = "VRF LITE cannot be attached to switch {0} with role {1}".format( + v_a["ip_address"], self.role + ) self.module.fail_json(msg=msg) - '''Get the IP/Interface that is connected to edge router can be get from below query''' - method = 'GET' - path = self.paths["GET_VRF_SWITCH"].format(self.fabric, self.diff_attach[0]['vrfName'], self.serial) + """Get the IP/Interface that is connected to edge router can be get from below query""" + method = "GET" + path = self.paths["GET_VRF_SWITCH"].format( + self.fabric, self.diff_attach[0]["vrfName"], self.serial + ) lite_objects = dcnm_send(self.module, method, path) - if not lite_objects.get('DATA'): + if not lite_objects.get("DATA"): return - lite = lite_objects['DATA'][0]['switchDetailsList'][0]['extensionPrototypeValues'] + lite = lite_objects["DATA"][0]["switchDetailsList"][0][ + "extensionPrototypeValues" + ] ext_values = None for ext_l in lite: - if str(ext_l.get('extensionType')) == 'VRF_LITE': - ext_values = ext_l['extensionValues'] + if str(ext_l.get("extensionType")) == "VRF_LITE": + ext_values = ext_l["extensionValues"] ext_values = ast.literal_eval(ext_values) extension_values = {} - for ad_l in v_a['vrf_lite']: + for ad_l in v_a["vrf_lite"]: vrflite_con = {} - vrflite_con['VRF_LITE_CONN'] = [] - vrflite_con['VRF_LITE_CONN'].append({}) - if ad_l['interface']: - vrflite_con['VRF_LITE_CONN'][0]['IF_NAME'] = ad_l['interface'] + vrflite_con["VRF_LITE_CONN"] = [] + vrflite_con["VRF_LITE_CONN"].append({}) + if ad_l["interface"]: + vrflite_con["VRF_LITE_CONN"][0][ + 
"IF_NAME" + ] = ad_l["interface"] else: - vrflite_con['VRF_LITE_CONN'][0]['IF_NAME'] = ext_values['IF_NAME'] - - if ad_l['dot1q']: - vrflite_con['VRF_LITE_CONN'][0]['DOT1Q_ID'] = str(ad_l['dot1q']) + vrflite_con["VRF_LITE_CONN"][0][ + "IF_NAME" + ] = ext_values["IF_NAME"] + + if ad_l["dot1q"]: + vrflite_con["VRF_LITE_CONN"][0][ + "DOT1Q_ID" + ] = str(ad_l["dot1q"]) else: - vrflite_con['VRF_LITE_CONN'][0]['DOT1Q_ID'] = str(ext_values['DOT1Q_ID']) - - if ad_l['ipv4_addr']: - vrflite_con['VRF_LITE_CONN'][0]['IP_MASK'] = ad_l['ipv4_addr'] + vrflite_con["VRF_LITE_CONN"][0][ + "DOT1Q_ID" + ] = str(ext_values["DOT1Q_ID"]) + + if ad_l["ipv4_addr"]: + vrflite_con["VRF_LITE_CONN"][0][ + "IP_MASK" + ] = ad_l["ipv4_addr"] else: - vrflite_con['VRF_LITE_CONN'][0]['IP_MASK'] = ext_values['IP_MASK'] - - if ad_l['neighbor_ipv4']: - vrflite_con['VRF_LITE_CONN'][0]['NEIGHBOR_IP'] = ad_l['neighbor_ipv4'] + vrflite_con["VRF_LITE_CONN"][0][ + "IP_MASK" + ] = ext_values["IP_MASK"] + + if ad_l["neighbor_ipv4"]: + vrflite_con["VRF_LITE_CONN"][0][ + "NEIGHBOR_IP" + ] = ad_l["neighbor_ipv4"] else: - vrflite_con['VRF_LITE_CONN'][0]['NEIGHBOR_IP'] = ext_values['NEIGHBOR_IP'] - - vrflite_con['VRF_LITE_CONN'][0]['NEIGHBOR_ASN'] = ext_values['NEIGHBOR_ASN'] - - if ad_l['ipv6_addr']: - vrflite_con['VRF_LITE_CONN'][0]['IPV6_MASK'] = ad_l['ipv6_addr'] + vrflite_con["VRF_LITE_CONN"][0][ + "NEIGHBOR_IP" + ] = ext_values["NEIGHBOR_IP"] + + vrflite_con["VRF_LITE_CONN"][0][ + "NEIGHBOR_ASN" + ] = ext_values["NEIGHBOR_ASN"] + + if ad_l["ipv6_addr"]: + vrflite_con["VRF_LITE_CONN"][0][ + "IPV6_MASK" + ] = ad_l["ipv6_addr"] else: - vrflite_con['VRF_LITE_CONN'][0]['IPV6_MASK'] = ext_values['IPV6_MASK'] - - if ad_l['neighbor_ipv6']: - vrflite_con['VRF_LITE_CONN'][0]['IPV6_NEIGHBOR'] = ad_l['neighbor_ipv6'] + vrflite_con["VRF_LITE_CONN"][0][ + "IPV6_MASK" + ] = ext_values["IPV6_MASK"] + + if ad_l["neighbor_ipv6"]: + vrflite_con["VRF_LITE_CONN"][0][ + "IPV6_NEIGHBOR" + ] = ad_l["neighbor_ipv6"] else: - 
vrflite_con['VRF_LITE_CONN'][0]['IPV6_NEIGHBOR'] = ext_values['IPV6_NEIGHBOR'] - - vrflite_con['VRF_LITE_CONN'][0]['AUTO_VRF_LITE_FLAG'] = ext_values['AUTO_VRF_LITE_FLAG'] - vrflite_con['VRF_LITE_CONN'][0]['PEER_VRF_NAME'] = ad_l['peer_vrf'] - vrflite_con['VRF_LITE_CONN'][0]['VRF_LITE_JYTHON_TEMPLATE'] = 'Ext_VRF_Lite_Jython' - extension_values['VRF_LITE_CONN'] = json.dumps(vrflite_con) + vrflite_con["VRF_LITE_CONN"][0][ + "IPV6_NEIGHBOR" + ] = ext_values["IPV6_NEIGHBOR"] + + vrflite_con["VRF_LITE_CONN"][0][ + "AUTO_VRF_LITE_FLAG" + ] = ext_values["AUTO_VRF_LITE_FLAG"] + vrflite_con["VRF_LITE_CONN"][0][ + "PEER_VRF_NAME" + ] = ad_l["peer_vrf"] + vrflite_con["VRF_LITE_CONN"][0][ + "VRF_LITE_JYTHON_TEMPLATE" + ] = "Ext_VRF_Lite_Jython" + extension_values["VRF_LITE_CONN"] = json.dumps( + vrflite_con + ) ms_con = {} - ms_con['MULTISITE_CONN'] = [] - extension_values['MULTISITE_CONN'] = json.dumps(ms_con) - - v_a['extensionValues'] = json.dumps(extension_values).replace(' ', '') - v_a['instanceValues'] = "{\"loopbackId\":\"\",\"loopbackIpAddress\":\"\",\"loopbackIpV6Address\":\"\"}" - del v_a['vrf_lite'] + ms_con["MULTISITE_CONN"] = [] + extension_values["MULTISITE_CONN"] = json.dumps( + ms_con + ) + + v_a["extensionValues"] = json.dumps( + extension_values + ).replace(" ", "") + v_a[ + "instanceValues" + ] = '{"loopbackId":"","loopbackIpAddress":"","loopbackIpV6Address":""}' + del v_a["vrf_lite"] if ext_values is None: - msg = 'There is no VRF LITE capable interface on this witch {}'.format(v_a['ip_address']) + msg = "There is no VRF LITE capable interface on this witch {0}".format( + v_a["ip_address"] + ) self.module.fail_json(msg=msg) else: - if ((v_a.get('vrf_lite', None) is not None)): - del v_a['vrf_lite'] + if v_a.get("vrf_lite", None) is not None: + del v_a["vrf_lite"] path = self.paths["GET_VRF"].format(self.fabric) - method = 'POST' - attach_path = path + '/attachments' + method = "POST" + attach_path = path + "/attachments" # Update the fabric name to 
specific fabric to which the switches belong for multisite fabric. - if self.fabric_type == 'MFD': + if self.fabric_type == "MFD": for elem in self.diff_attach: - for node in elem['lanAttachList']: - node['fabric'] = self.sn_fab[node['serialNumber']] + for node in elem["lanAttachList"]: + node["fabric"] = self.sn_fab[node["serialNumber"]] - resp = dcnm_send(self.module, method, attach_path, json.dumps(self.diff_attach)) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "attach") + resp = dcnm_send( + self.module, method, attach_path, json.dumps(self.diff_attach) + ) + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "attach") if fail: if is_rollback: self.failed_to_rollback = True return self.failure(resp) - method = 'POST' + method = "POST" if self.diff_deploy: - deploy_path = path + '/deployments' - resp = dcnm_send(self.module, method, deploy_path, json.dumps(self.diff_deploy)) - self.result['response'].append(resp) - fail, self.result['changed'] = self.handle_response(resp, "deploy") + deploy_path = path + "/deployments" + resp = dcnm_send( + self.module, method, deploy_path, json.dumps(self.diff_deploy) + ) + self.result["response"].append(resp) + fail, self.result["changed"] = self.handle_response(resp, "deploy") if fail: if is_rollback: self.failed_to_rollback = True @@ -1572,7 +1823,7 @@ def push_to_remote(self, is_rollback=False): def wait_for_vrf_del_ready(self): - method = 'GET' + method = "GET" if self.diff_delete: for vrf in self.diff_delete: state = False @@ -1580,52 +1831,57 @@ def wait_for_vrf_del_ready(self): while not state: resp = dcnm_send(self.module, method, path) state = True - if resp.get('DATA') is not None: - attach_list = resp['DATA'][0]['lanAttachList'] + if resp.get("DATA") is not None: + attach_list = resp["DATA"][0]["lanAttachList"] for atch in attach_list: - if atch['lanAttachState'] == 'OUT-OF-SYNC' or atch['lanAttachState'] == 'FAILED': 
- self.diff_delete.update({vrf: 'OUT-OF-SYNC'}) + if ( + atch["lanAttachState"] == "OUT-OF-SYNC" + or atch["lanAttachState"] == "FAILED" + ): + self.diff_delete.update({vrf: "OUT-OF-SYNC"}) break - if atch['lanAttachState'] != 'NA': - self.diff_delete.update({vrf: 'DEPLOYED'}) + if atch["lanAttachState"] != "NA": + self.diff_delete.update({vrf: "DEPLOYED"}) state = False time.sleep(self.WAIT_TIME_FOR_DELETE_LOOP) break - self.diff_delete.update({vrf: 'NA'}) + self.diff_delete.update({vrf: "NA"}) return True def validate_input(self): """Parse the playbook values, validate to param specs.""" - state = self.params['state'] + state = self.params["state"] - if state == 'merged' or state == 'overridden' or state == 'replaced': + if state == "merged" or state == "overridden" or state == "replaced": vrf_spec = dict( - vrf_name=dict(required=True, type='str', length_max=32), - vrf_id=dict(type='int', range_max=16777214), - vrf_template=dict(type='str', default='Default_VRF_Universal'), - vrf_extension_template=dict(type='str', default='Default_VRF_Extension_Universal'), - vlan_id=dict(type='int', range_max=4094), - source=dict(type='str', default=None), - service_vrf_template=dict(type='str', default=None), - attach=dict(type='list'), - deploy=dict(type='bool', default=True) + vrf_name=dict(required=True, type="str", length_max=32), + vrf_id=dict(type="int", range_max=16777214), + vrf_template=dict(type="str", default="Default_VRF_Universal"), + vrf_extension_template=dict( + type="str", default="Default_VRF_Extension_Universal" + ), + vlan_id=dict(type="int", range_max=4094), + source=dict(type="str", default=None), + service_vrf_template=dict(type="str", default=None), + attach=dict(type="list"), + deploy=dict(type="bool", default=True), ) att_spec = dict( - ip_address=dict(required=True, type='str'), - deploy=dict(type='bool', default=True), - vrf_lite=dict(type='list', default=[]) + ip_address=dict(required=True, type="str"), + deploy=dict(type="bool", default=True), + 
vrf_lite=dict(type="list", default=[]), ) lite_spec = dict( - interface=dict(type='str'), - peer_vrf=dict(required=True, type='str'), - ipv4_addr=dict(type='ipv4_subnet'), - neighbor_ipv4=dict(type='ipv4'), - ipv6_addr=dict(type='ipv6'), - neighbor_ipv6=dict(type='ipv6'), - dot1q=dict(type='int') + interface=dict(type="str"), + peer_vrf=dict(required=True, type="str"), + ipv4_addr=dict(type="ipv4_subnet"), + neighbor_ipv4=dict(type="ipv4"), + ipv6_addr=dict(type="ipv6"), + neighbor_ipv6=dict(type="ipv6"), + dot1q=dict(type="int"), ) msg = None @@ -1633,107 +1889,126 @@ def validate_input(self): for vrf in self.config: # A few user provided vrf parameters need special handling # Ignore user input for src and hard code it to None - vrf['source'] = None - if not vrf.get('service_vrf_template'): - vrf['service_vrf_template'] = None + vrf["source"] = None + if not vrf.get("service_vrf_template"): + vrf["service_vrf_template"] = None - if 'vrf_name' not in vrf: + if "vrf_name" not in vrf: msg = "vrf_name is mandatory under vrf parameters" - if 'attach' in vrf and vrf['attach']: - for attach in vrf['attach']: + if "attach" in vrf and vrf["attach"]: + for attach in vrf["attach"]: # if 'ip_address' not in attach or 'vlan_id' not in attach: # msg = "ip_address and vlan_id are mandatory under attach parameters" - if 'ip_address' not in attach: + if "ip_address" not in attach: msg = "ip_address is mandatory under attach parameters" - if attach.get('vrf_lite'): - for vl in attach['vrf_lite']: - if not vl.get('peer_vrf'): + if attach.get("vrf_lite"): + for vl in attach["vrf_lite"]: + if not vl.get("peer_vrf"): msg = "peer_vrf is mandatory under attach VRF LITE parameters" else: - if state == 'merged' or state == 'overridden' or \ - state == 'replaced': - msg = "config: element is mandatory for this state {}".format(state) + if state == "merged" or state == "overridden" or state == "replaced": + msg = "config: element is mandatory for this state {0}".format( + state + ) if msg: 
self.module.fail_json(msg=msg) if self.config: - valid_vrf, invalid_params = validate_list_of_dicts(self.config, vrf_spec) + valid_vrf, invalid_params = validate_list_of_dicts( + self.config, vrf_spec + ) for vrf in valid_vrf: - if vrf.get('attach'): + if vrf.get("attach"): # The deploy setting provided in the user parameters # has the following behavior: # (1) By default deploy is true # (2) The global 'deploy' option for the vrf applies to # any attachments that don't have the 'deploy' # option explicity set. - for entry in vrf.get('attach'): - if 'deploy' not in entry.keys() and 'deploy' in vrf: + for entry in vrf.get("attach"): + if "deploy" not in entry.keys() and "deploy" in vrf: # This attach entry does not have a deploy key # but the vrf global deploy flag is set so set # it to the global 'deploy' value - entry['deploy'] = vrf['deploy'] - valid_att, invalid_att = validate_list_of_dicts(vrf['attach'], att_spec) - vrf['attach'] = valid_att + entry["deploy"] = vrf["deploy"] + valid_att, invalid_att = validate_list_of_dicts( + vrf["attach"], att_spec + ) + vrf["attach"] = valid_att invalid_params.extend(invalid_att) - for lite in vrf.get('attach'): - if lite.get('vrf_lite'): - valid_lite, invalid_lite = validate_list_of_dicts(lite['vrf_lite'], lite_spec) - lite['vrf_lite'] = valid_lite + for lite in vrf.get("attach"): + if lite.get("vrf_lite"): + valid_lite, invalid_lite = validate_list_of_dicts( + lite["vrf_lite"], lite_spec + ) + lite["vrf_lite"] = valid_lite invalid_params.extend(invalid_lite) self.validated.append(vrf) if invalid_params: - msg = 'Invalid parameters in playbook: {}'.format('\n'.join(invalid_params)) + msg = "Invalid parameters in playbook: {0}".format( + "\n".join(invalid_params) + ) self.module.fail_json(msg=msg) else: vrf_spec = dict( - vrf_name=dict(required=True, type='str', length_max=32), - vrf_id=dict(type='int', range_max=16777214), - vrf_template=dict(type='str', default='Default_VRF_Universal'), - 
vrf_extension_template=dict(type='str', default='Default_VRF_Extension_Universal'), - vlan_id=dict(type='int', range_max=4094), - source=dict(type='str', default=None), - service_vrf_template=dict(type='str', default=None), - attach=dict(type='list'), - deploy=dict(type='bool') + vrf_name=dict(required=True, type="str", length_max=32), + vrf_id=dict(type="int", range_max=16777214), + vrf_template=dict(type="str", default="Default_VRF_Universal"), + vrf_extension_template=dict( + type="str", default="Default_VRF_Extension_Universal" + ), + vlan_id=dict(type="int", range_max=4094), + source=dict(type="str", default=None), + service_vrf_template=dict(type="str", default=None), + attach=dict(type="list"), + deploy=dict(type="bool"), ) att_spec = dict( - ip_address=dict(required=True, type='str'), - deploy=dict(type='bool', default=True), - vrf_lite=dict(type='list', default=[]) + ip_address=dict(required=True, type="str"), + deploy=dict(type="bool", default=True), + vrf_lite=dict(type="list", default=[]), ) lite_spec = dict( - interface=dict(type='str'), - peer_vrf=dict(required=True, type='str'), - ipv4_addr=dict(type='ipv4_subnet'), - neighbor_ipv4=dict(type='ipv4'), - ipv6_addr=dict(type='ipv6'), - neighbor_ipv6=dict(type='ipv6'), - dot1q=dict(type='int') + interface=dict(type="str"), + peer_vrf=dict(required=True, type="str"), + ipv4_addr=dict(type="ipv4_subnet"), + neighbor_ipv4=dict(type="ipv4"), + ipv6_addr=dict(type="ipv6"), + neighbor_ipv6=dict(type="ipv6"), + dot1q=dict(type="int"), ) if self.config: - valid_vrf, invalid_params = validate_list_of_dicts(self.config, vrf_spec) + valid_vrf, invalid_params = validate_list_of_dicts( + self.config, vrf_spec + ) for vrf in valid_vrf: - if vrf.get('attach'): - valid_att, invalid_att = validate_list_of_dicts(vrf['attach'], att_spec) - vrf['attach'] = valid_att + if vrf.get("attach"): + valid_att, invalid_att = validate_list_of_dicts( + vrf["attach"], att_spec + ) + vrf["attach"] = valid_att 
invalid_params.extend(invalid_att) - for lite in vrf.get('attach'): - if lite.get('vrf_lite'): - valid_lite, invalid_lite = validate_list_of_dicts(lite['vrf_lite'], lite_spec) - lite['vrf_lite'] = valid_lite + for lite in vrf.get("attach"): + if lite.get("vrf_lite"): + valid_lite, invalid_lite = validate_list_of_dicts( + lite["vrf_lite"], lite_spec + ) + lite["vrf_lite"] = valid_lite invalid_params.extend(invalid_lite) self.validated.append(vrf) if invalid_params: - msg = 'Invalid parameters in playbook: {}'.format('\n'.join(invalid_params)) + msg = "Invalid parameters in playbook: {0}".format( + "\n".join(invalid_params) + ) self.module.fail_json(msg=msg) def handle_response(self, res, op): @@ -1741,28 +2016,28 @@ def handle_response(self, res, op): fail = False changed = True - if op == 'query_dcnm': + if op == "query_dcnm": # This if blocks handles responses to the query APIs against DCNM. # Basically all GET operations. # - if res.get('ERROR') == 'Not Found' and res['RETURN_CODE'] == 404: + if res.get("ERROR") == "Not Found" and res["RETURN_CODE"] == 404: return True, False - if res['RETURN_CODE'] != 200 or res['MESSAGE'] != 'OK': + if res["RETURN_CODE"] != 200 or res["MESSAGE"] != "OK": return False, True return False, False # Responses to all other operations POST and PUT are handled here. 
- if res.get('MESSAGE') != 'OK': + if res.get("MESSAGE") != "OK": fail = True changed = False return fail, changed - if res.get('ERROR'): + if res.get("ERROR"): fail = True changed = False - if op == 'attach' and 'is in use already' in str(res.values()): + if op == "attach" and "is in use already" in str(res.values()): fail = True changed = False - if op == 'deploy' and 'No switches PENDING for deployment' in str(res.values()): + if op == "deploy" and "No switches PENDING for deployment" in str(res.values()): changed = False return fail, changed @@ -1787,16 +2062,18 @@ def failure(self, resp): if self.failed_to_rollback: msg1 = "FAILED - Attempted rollback of the task has failed, may need manual intervention" else: - msg1 = 'SUCCESS - Attempted rollback of the task has succeeded' + msg1 = "SUCCESS - Attempted rollback of the task has succeeded" res = copy.deepcopy(resp) - res.update({'ROLLBACK_RESULT': msg1}) + res.update({"ROLLBACK_RESULT": msg1}) - if not resp.get('DATA'): - data = copy.deepcopy(resp.get('DATA')) - if data.get('stackTrace'): - data.update({'stackTrace': 'Stack trace is hidden, use \'-vvvvv\' to print it'}) - res.update({'DATA': data}) + if not resp.get("DATA"): + data = copy.deepcopy(resp.get("DATA")) + if data.get("stackTrace"): + data.update( + {"stackTrace": "Stack trace is hidden, use '-vvvvv' to print it"} + ) + res.update({"DATA": data}) if self.module._verbosity >= 5: self.module.fail_json(msg=res) @@ -1805,57 +2082,68 @@ def failure(self, resp): def main(): - """ main entry point for module execution - """ + """main entry point for module execution""" element_spec = dict( - fabric=dict(required=True, type='str'), - config=dict(required=False, type='list', elements='dict'), - state=dict(default='merged', - choices=['merged', 'replaced', 'deleted', 'overridden', 'query']), + fabric=dict(required=True, type="str"), + config=dict(required=False, type="list", elements="dict"), + state=dict( + default="merged", + choices=["merged", "replaced", 
"deleted", "overridden", "query"], + ), ) - module = AnsibleModule(argument_spec=element_spec, - supports_check_mode=True) + module = AnsibleModule(argument_spec=element_spec, supports_check_mode=True) dcnm_vrf = DcnmVrf(module) if not dcnm_vrf.ip_sn: - module.fail_json(msg="Fabric {} missing on DCNM or does not have any switches".format(dcnm_vrf.fabric)) + module.fail_json( + msg="Fabric {0} missing on DCNM or does not have any switches".format( + dcnm_vrf.fabric + ) + ) dcnm_vrf.validate_input() dcnm_vrf.get_want() dcnm_vrf.get_have() - if module.params['state'] == 'merged': + if module.params["state"] == "merged": dcnm_vrf.get_diff_merge() - if module.params['state'] == 'replaced': + if module.params["state"] == "replaced": dcnm_vrf.get_diff_replace() - if module.params['state'] == 'overridden': + if module.params["state"] == "overridden": dcnm_vrf.get_diff_override() - if module.params['state'] == 'deleted': + if module.params["state"] == "deleted": dcnm_vrf.get_diff_delete() - if module.params['state'] == 'query': + if module.params["state"] == "query": dcnm_vrf.get_diff_query() - dcnm_vrf.result['response'] = dcnm_vrf.query + dcnm_vrf.result["response"] = dcnm_vrf.query dcnm_vrf.format_diff() - dcnm_vrf.result['diff'] = dcnm_vrf.diff_input_format - - if dcnm_vrf.diff_create or dcnm_vrf.diff_attach or dcnm_vrf.diff_detach or dcnm_vrf.diff_deploy \ - or dcnm_vrf.diff_undeploy or dcnm_vrf.diff_delete or dcnm_vrf.diff_create_quick \ - or dcnm_vrf.diff_create_update: - dcnm_vrf.result['changed'] = True + dcnm_vrf.result["diff"] = dcnm_vrf.diff_input_format + + if ( + dcnm_vrf.diff_create + or dcnm_vrf.diff_attach + or dcnm_vrf.diff_detach + or dcnm_vrf.diff_deploy + or dcnm_vrf.diff_undeploy + or dcnm_vrf.diff_delete + or dcnm_vrf.diff_create_quick + or dcnm_vrf.diff_create_update + ): + dcnm_vrf.result["changed"] = True else: module.exit_json(**dcnm_vrf.result) if module.check_mode: - dcnm_vrf.result['changed'] = False + dcnm_vrf.result["changed"] = False 
module.exit_json(**dcnm_vrf.result) dcnm_vrf.push_to_remote() @@ -1863,5 +2151,5 @@ def main(): module.exit_json(**dcnm_vrf.result) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/tests/.gitignore b/tests/.gitignore new file mode 100644 index 000000000..9b1960e71 --- /dev/null +++ b/tests/.gitignore @@ -0,0 +1 @@ +output/ \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_delete_diff_options.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_delete_diff_options.yaml index 596a69eb7..25972a462 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_delete_diff_options.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_delete_diff_options.yaml @@ -362,4 +362,3 @@ - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' when: IT_CONTEXT is not defined - diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_eth_delete.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_eth_delete.yaml index aa97a7491..a85eef0f1 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_eth_delete.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_eth_delete.yaml @@ -349,4 +349,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_eth_merge.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_eth_merge.yaml index 58ee4de7b..0c5a6ca11 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_eth_merge.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_eth_merge.yaml @@ -160,5 +160,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git 
a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_eth_override.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_eth_override.yaml index 829812788..53dd16c1e 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_eth_override.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_eth_override.yaml @@ -200,5 +200,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_eth_replace.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_eth_replace.yaml index adf49295f..c8d0faee2 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_eth_replace.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_eth_replace.yaml @@ -257,5 +257,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_multi_switches.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_multi_switches.yaml index 5427e2179..779260d62 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_multi_switches.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_multi_switches.yaml @@ -112,5 +112,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_no_optional_elems.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_no_optional_elems.yaml index 63c6ea373..00de715b1 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_no_optional_elems.yaml +++ 
b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_no_optional_elems.yaml @@ -273,5 +273,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_query.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_query.yaml index 5a108b13c..19222844e 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_query.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_query.yaml @@ -216,4 +216,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_lo_delete.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_lo_delete.yaml index eba3a8280..8b2e8e7a5 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_lo_delete.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_lo_delete.yaml @@ -169,5 +169,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_lo_merge.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_lo_merge.yaml index d76c21c5e..047c0a932 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_lo_merge.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_lo_merge.yaml @@ -112,5 +112,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_lo_override.yaml 
b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_lo_override.yaml index 8fd0b776d..16b16f1f3 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_lo_override.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_lo_override.yaml @@ -169,5 +169,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_lo_replace.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_lo_replace.yaml index 7f71012b4..e9341a1e7 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_lo_replace.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_lo_replace.yaml @@ -169,5 +169,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_old_format_pb.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_old_format_pb.yaml index 8ea3fcbc4..942f88e0f 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_old_format_pb.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_old_format_pb.yaml @@ -167,4 +167,4 @@ - assert: that: - 'result.failed == false' - - 'result.changed == true' + - 'result.changed == true' \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_pc_delete.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_pc_delete.yaml index 5a3e77263..dbdc33ac5 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_pc_delete.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_pc_delete.yaml @@ -238,5 +238,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT 
is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_pc_merge.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_pc_merge.yaml index 38f8250ee..509113686 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_pc_merge.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_pc_merge.yaml @@ -146,5 +146,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_pc_override.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_pc_override.yaml index de753d94d..0f25433e8 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_pc_override.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_pc_override.yaml @@ -189,5 +189,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_pc_replace.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_pc_replace.yaml index a2c597595..f404c5c14 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_pc_replace.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_pc_replace.yaml @@ -229,5 +229,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_sub_delete.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_sub_delete.yaml index 675d144c6..ab209c044 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_sub_delete.yaml +++ 
b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_sub_delete.yaml @@ -182,5 +182,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_sub_merge.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_sub_merge.yaml index fce374f2b..88b24f5b1 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_sub_merge.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_sub_merge.yaml @@ -118,5 +118,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_sub_override.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_sub_override.yaml index a3075a168..ed33dffa5 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_sub_override.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_sub_override.yaml @@ -162,5 +162,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_sub_replace.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_sub_replace.yaml index 8cb769133..b19ee84df 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_sub_replace.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_sub_replace.yaml @@ -181,5 +181,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_vpc_delete.yaml 
b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_vpc_delete.yaml index 99163ad5b..2d08f26ce 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_vpc_delete.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_vpc_delete.yaml @@ -201,5 +201,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_vpc_merge.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_vpc_merge.yaml index 7e3e71285..c482c4eba 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_vpc_merge.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_vpc_merge.yaml @@ -127,5 +127,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_vpc_override.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_vpc_override.yaml index bac35ca18..d86e70862 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_vpc_override.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_vpc_override.yaml @@ -181,5 +181,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_vpc_replace.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_vpc_replace.yaml index 6f2f7a04c..f9880cd2a 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_vpc_replace.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_vpc_replace.yaml @@ -209,5 +209,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is 
not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_inventory/tests/dcnm/deleted.yaml b/tests/integration/targets/dcnm_inventory/tests/dcnm/deleted.yaml index 893da6545..cd2263da4 100644 --- a/tests/integration/targets/dcnm_inventory/tests/dcnm/deleted.yaml +++ b/tests/integration/targets/dcnm_inventory/tests/dcnm/deleted.yaml @@ -140,4 +140,4 @@ - name: DELETED - setup - Clean up any existing devices cisco.dcnm.dcnm_inventory: fabric: "{{ ansible_it_fabric }}" - state: deleted + state: deleted \ No newline at end of file diff --git a/tests/integration/targets/dcnm_inventory/tests/dcnm/merged.yaml b/tests/integration/targets/dcnm_inventory/tests/dcnm/merged.yaml index 01b8eedc4..ac2c2ce6b 100644 --- a/tests/integration/targets/dcnm_inventory/tests/dcnm/merged.yaml +++ b/tests/integration/targets/dcnm_inventory/tests/dcnm/merged.yaml @@ -266,4 +266,4 @@ - name: MERGED - setup - Clean up any existing devices cisco.dcnm.dcnm_inventory: fabric: "{{ ansible_it_fabric }}" - state: deleted + state: deleted \ No newline at end of file diff --git a/tests/integration/targets/dcnm_inventory/tests/dcnm/overridden.yaml b/tests/integration/targets/dcnm_inventory/tests/dcnm/overridden.yaml index fcefe97f7..2f46b99d3 100644 --- a/tests/integration/targets/dcnm_inventory/tests/dcnm/overridden.yaml +++ b/tests/integration/targets/dcnm_inventory/tests/dcnm/overridden.yaml @@ -91,4 +91,4 @@ - name: OVERRIDDEN - setup - Clean up any existing devices cisco.dcnm.dcnm_inventory: fabric: "{{ ansible_it_fabric }}" - state: deleted + state: deleted \ No newline at end of file diff --git a/tests/integration/targets/dcnm_inventory/tests/dcnm/query.yaml b/tests/integration/targets/dcnm_inventory/tests/dcnm/query.yaml index 376a6845c..e32e2ae95 100644 --- a/tests/integration/targets/dcnm_inventory/tests/dcnm/query.yaml +++ b/tests/integration/targets/dcnm_inventory/tests/dcnm/query.yaml @@ -195,5 +195,4 @@ - name: QUERY - 
cleanup - Clean up any existing devices cisco.dcnm.dcnm_inventory: fabric: "{{ ansible_it_fabric }}" - state: deleted - + state: deleted \ No newline at end of file diff --git a/tests/integration/targets/dcnm_network/tests/dcnm/deleted.yaml b/tests/integration/targets/dcnm_network/tests/dcnm/deleted.yaml index 84db17ceb..e3854a1e3 100644 --- a/tests/integration/targets/dcnm_network/tests/dcnm/deleted.yaml +++ b/tests/integration/targets/dcnm_network/tests/dcnm/deleted.yaml @@ -93,7 +93,7 @@ - 'result.changed == true' - 'result.response[0].RETURN_CODE == 200' - 'result.response[1].RETURN_CODE == 200' - - 'result.response[2].RETURN_CODE == 200' + - 'result.response[2].RETURN_CODE == 200' - 'result.response[0].MESSAGE == "OK"' - 'result.response[1].MESSAGE == "OK"' - 'result.response[2].MESSAGE == "OK"' @@ -230,7 +230,7 @@ - 'result.changed == true' - 'result.response[0].RETURN_CODE == 200' - 'result.response[1].RETURN_CODE == 200' - - 'result.response[2].RETURN_CODE == 200' + - 'result.response[2].RETURN_CODE == 200' - 'result.response[0].MESSAGE == "OK"' - 'result.response[1].MESSAGE == "OK"' - 'result.response[2].MESSAGE == "OK"' @@ -431,7 +431,7 @@ - 'result.changed == true' - 'result.response[0].RETURN_CODE == 200' - 'result.response[1].RETURN_CODE == 200' - - 'result.response[2].RETURN_CODE == 200' + - 'result.response[2].RETURN_CODE == 200' - 'result.response[0].MESSAGE == "OK"' - 'result.response[1].MESSAGE == "OK"' - 'result.response[2].MESSAGE == "OK"' @@ -627,4 +627,4 @@ - name: DELETED - setup - remove any networks cisco.dcnm.dcnm_network: fabric: "{{ ansible_it_fabric }}" - state: deleted + state: deleted \ No newline at end of file diff --git a/tests/integration/targets/dcnm_network/tests/dcnm/merged.yaml b/tests/integration/targets/dcnm_network/tests/dcnm/merged.yaml index e8f82c27d..e934c83bc 100644 --- a/tests/integration/targets/dcnm_network/tests/dcnm/merged.yaml +++ b/tests/integration/targets/dcnm_network/tests/dcnm/merged.yaml @@ -934,5 +934,4 @@ 
- name: MERGED - setup - remove any networks cisco.dcnm.dcnm_network: fabric: "{{ ansible_it_fabric }}" - state: deleted - + state: deleted \ No newline at end of file diff --git a/tests/integration/targets/dcnm_network/tests/dcnm/overridden.yaml b/tests/integration/targets/dcnm_network/tests/dcnm/overridden.yaml index d20fbf21c..3522cdddc 100644 --- a/tests/integration/targets/dcnm_network/tests/dcnm/overridden.yaml +++ b/tests/integration/targets/dcnm_network/tests/dcnm/overridden.yaml @@ -326,4 +326,4 @@ - name: OVERRIDDEN - setup - remove any networks cisco.dcnm.dcnm_network: fabric: "{{ ansible_it_fabric }}" - state: deleted + state: deleted \ No newline at end of file diff --git a/tests/integration/targets/dcnm_network/tests/dcnm/query.yaml b/tests/integration/targets/dcnm_network/tests/dcnm/query.yaml index ef5bd7122..91b665106 100644 --- a/tests/integration/targets/dcnm_network/tests/dcnm/query.yaml +++ b/tests/integration/targets/dcnm_network/tests/dcnm/query.yaml @@ -460,4 +460,4 @@ - name: QUERY - setup - remove any networks cisco.dcnm.dcnm_network: fabric: "{{ ansible_it_fabric }}" - state: deleted + state: deleted \ No newline at end of file diff --git a/tests/integration/targets/dcnm_network/tests/dcnm/replaced.yaml b/tests/integration/targets/dcnm_network/tests/dcnm/replaced.yaml index c0e2d053e..aafafa8aa 100644 --- a/tests/integration/targets/dcnm_network/tests/dcnm/replaced.yaml +++ b/tests/integration/targets/dcnm_network/tests/dcnm/replaced.yaml @@ -452,4 +452,4 @@ - name: REPLACED - setup - remove any networks cisco.dcnm.dcnm_network: fabric: "{{ ansible_it_fabric }}" - state: deleted + state: deleted \ No newline at end of file diff --git a/tests/integration/targets/dcnm_network/tests/dcnm/scale.yaml b/tests/integration/targets/dcnm_network/tests/dcnm/scale.yaml index 1aaa7d121..83be2c3a5 100644 --- a/tests/integration/targets/dcnm_network/tests/dcnm/scale.yaml +++ b/tests/integration/targets/dcnm_network/tests/dcnm/scale.yaml @@ -1,4 +1,3 @@ 
- ############################################## ## SETUP ## ############################################## @@ -72,4 +71,4 @@ - name: SCALE - Clean up any existing networks cisco.dcnm.dcnm_network: fabric: "{{ ansible_it_fabric }}" - state: deleted + state: deleted \ No newline at end of file diff --git a/tests/integration/targets/dcnm_network/tests/dcnm/self-contained-tests/sm_dhcp_params.yaml b/tests/integration/targets/dcnm_network/tests/dcnm/self-contained-tests/sm_dhcp_params.yaml index 1be3fd3d5..5a7b34389 100644 --- a/tests/integration/targets/dcnm_network/tests/dcnm/self-contained-tests/sm_dhcp_params.yaml +++ b/tests/integration/targets/dcnm_network/tests/dcnm/self-contained-tests/sm_dhcp_params.yaml @@ -53,5 +53,4 @@ - "result.response[0].parent.networkTemplateConfig.vrfDhcp is search('one')" - "result.response[0].parent.networkTemplateConfig.vrfDhcp2 is search('two')" - "result.response[0].parent.networkTemplateConfig.vrfDhcp3 is search('three')" - - "result.response[0].parent.networkTemplateConfig.loopbackId is search('0')" - + - "result.response[0].parent.networkTemplateConfig.loopbackId is search('0')" \ No newline at end of file diff --git a/tests/integration/targets/dcnm_policy/tasks/dcnm.yaml b/tests/integration/targets/dcnm_policy/tasks/dcnm.yaml index 35c47bca6..9e606ce8e 100644 --- a/tests/integration/targets/dcnm_policy/tasks/dcnm.yaml +++ b/tests/integration/targets/dcnm_policy/tasks/dcnm.yaml @@ -53,5 +53,4 @@ - assert: that: - 'item["RETURN_CODE"] == 200' - loop: '{{ result.response }}' - + loop: '{{ result.response }}' \ No newline at end of file diff --git a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_adc_po_change.yaml b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_adc_po_change.yaml index 0e43ae99e..a4d20dd3e 100644 --- a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_adc_po_change.yaml +++ 
b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_adc_po_change.yaml @@ -407,5 +407,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_delete.yaml b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_delete.yaml index bcd73cc48..66be11aba 100644 --- a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_delete.yaml +++ b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_delete.yaml @@ -532,4 +532,4 @@ - assert: that: - 'item["RETURN_CODE"] == 200' - loop: '{{ result.response }}' + loop: '{{ result.response }}' \ No newline at end of file diff --git a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_fw_po_change.yaml b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_fw_po_change.yaml index 519e63c2e..b1f91a8f4 100644 --- a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_fw_po_change.yaml +++ b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_fw_po_change.yaml @@ -260,5 +260,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_merge.yaml b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_merge.yaml index b8897d0ed..03b861af2 100644 --- a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_merge.yaml +++ 
b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_merge.yaml @@ -325,5 +325,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_merge_existing.yaml b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_merge_existing.yaml index ee4a02111..ad69e0dd9 100644 --- a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_merge_existing.yaml +++ b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_merge_existing.yaml @@ -831,5 +831,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_no_opt_elems.yaml b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_no_opt_elems.yaml index cb27ca47d..d1cf25611 100644 --- a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_no_opt_elems.yaml +++ b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_no_opt_elems.yaml @@ -214,5 +214,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_no_state.yaml b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_no_state.yaml index 20a57852b..4ae8e909e 100644 --- 
a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_no_state.yaml +++ b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_no_state.yaml @@ -324,5 +324,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_override.yaml b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_override.yaml index a09114fcc..f7ed49692 100644 --- a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_override.yaml +++ b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_override.yaml @@ -585,5 +585,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_query.yaml b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_query.yaml index 7a7d09f7d..12bd2ff32 100644 --- a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_query.yaml +++ b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_query.yaml @@ -395,5 +395,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_replace.yaml b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_replace.yaml index 88e70d91f..8c934b447 100644 --- 
a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_replace.yaml +++ b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_replace.yaml @@ -841,5 +841,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined - + when: IT_CONTEXT is not defined \ No newline at end of file diff --git a/tests/integration/targets/dcnm_template/tasks/dcnm.yaml b/tests/integration/targets/dcnm_template/tasks/dcnm.yaml index d0d9886a6..ab505bd77 100644 --- a/tests/integration/targets/dcnm_template/tasks/dcnm.yaml +++ b/tests/integration/targets/dcnm_template/tasks/dcnm.yaml @@ -49,5 +49,4 @@ - assert: that: - 'item["RETURN_CODE"] == 200' - loop: '{{ result.response }}' - + loop: '{{ result.response }}' \ No newline at end of file diff --git a/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_delete.yaml b/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_delete.yaml index 8a55d4387..07ba293d7 100644 --- a/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_delete.yaml +++ b/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_delete.yaml @@ -146,5 +146,4 @@ - 'result.changed == false' - '(result["diff"][0]["merged"] | length) == 0' - '(result["diff"][0]["deleted"] | length) == 0' - - '(result["diff"][0]["query"] | length) == 0' - + - '(result["diff"][0]["query"] | length) == 0' \ No newline at end of file diff --git a/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_merge.yaml b/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_merge.yaml index f427df7db..740bddc7f 100644 --- a/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_merge.yaml +++ b/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_merge.yaml @@ -218,4 +218,4 @@ - 'result.changed == true' - '(result["diff"][0]["merged"] | length) == 0' - '(result["diff"][0]["deleted"] | length) == 5' - - 
'(result["diff"][0]["query"] | length) == 0' + - '(result["diff"][0]["query"] | length) == 0' \ No newline at end of file diff --git a/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_modify_properties.yaml b/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_modify_properties.yaml index 1e6440106..6b8625ebb 100644 --- a/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_modify_properties.yaml +++ b/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_modify_properties.yaml @@ -262,5 +262,4 @@ - 'result.changed == true' - '(result["diff"][0]["merged"] | length) == 0' - '(result["diff"][0]["deleted"] | length) == 4' - - '(result["diff"][0]["query"] | length) == 0' - + - '(result["diff"][0]["query"] | length) == 0' \ No newline at end of file diff --git a/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_no_delete.yaml b/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_no_delete.yaml index 80b683dfe..c9acaa0d9 100644 --- a/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_no_delete.yaml +++ b/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_no_delete.yaml @@ -88,5 +88,4 @@ - assert: that: - 'item["RETURN_CODE"] == 200' - loop: '{{ result.response }}' - + loop: '{{ result.response }}' \ No newline at end of file diff --git a/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_query.yaml b/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_query.yaml index b14f6115b..749034fd2 100644 --- a/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_query.yaml +++ b/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_query.yaml @@ -166,5 +166,4 @@ - 'result.changed == true' - '(result["diff"][0]["merged"] | length) == 0' - '(result["diff"][0]["deleted"] | length) == 4' - - '(result["diff"][0]["query"] | length) == 0' - + - '(result["diff"][0]["query"] | length) == 0' \ No newline at end of file diff --git 
a/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_validation_fail.yaml b/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_validation_fail.yaml index 11f37c1ab..6e5ce6daa 100644 --- a/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_validation_fail.yaml +++ b/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_validation_fail.yaml @@ -71,5 +71,4 @@ ############################################## ## CLEANUP ## -############################################## - +############################################## \ No newline at end of file diff --git a/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_wrong_state.yaml b/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_wrong_state.yaml index 35e1683bc..e8244b8af 100644 --- a/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_wrong_state.yaml +++ b/tests/integration/targets/dcnm_template/tests/dcnm/dcnm_template_wrong_state.yaml @@ -51,4 +51,4 @@ - assert: that: - 'result.changed == false' - - 'result["msg"] == "value of state must be one of: merged, deleted, query, got: replaced"' + - 'result["msg"] == "value of state must be one of: merged, deleted, query, got: replaced"' \ No newline at end of file diff --git a/tests/integration/targets/dcnm_vrf/tests/dcnm/scale.yaml b/tests/integration/targets/dcnm_vrf/tests/dcnm/scale.yaml index 262e5b640..4abe5e06d 100644 --- a/tests/integration/targets/dcnm_vrf/tests/dcnm/scale.yaml +++ b/tests/integration/targets/dcnm_vrf/tests/dcnm/scale.yaml @@ -1,4 +1,3 @@ - ############################################## ## SETUP ## ############################################## @@ -72,4 +71,4 @@ - name: SCALE - Clean up any existing vrfs cisco.dcnm.dcnm_vrf: fabric: "{{ ansible_it_fabric }}" - state: deleted + state: deleted \ No newline at end of file diff --git a/tests/integration/targets/prepare_dcnm_policy/tasks/main.yaml b/tests/integration/targets/prepare_dcnm_policy/tasks/main.yaml 
index 8b64bec4c..3d1590764 100644 --- a/tests/integration/targets/prepare_dcnm_policy/tasks/main.yaml +++ b/tests/integration/targets/prepare_dcnm_policy/tasks/main.yaml @@ -271,5 +271,4 @@ when: (my_idx == (result["diff"][0]["merged"] | length)) loop: '{{ result.response }}' loop_control: - index_var: my_idx - + index_var: my_idx \ No newline at end of file diff --git a/tests/integration/targets/prepare_dcnm_service_route_peering/tasks/main.yaml b/tests/integration/targets/prepare_dcnm_service_route_peering/tasks/main.yaml index b087fc373..50867eb87 100644 --- a/tests/integration/targets/prepare_dcnm_service_route_peering/tasks/main.yaml +++ b/tests/integration/targets/prepare_dcnm_service_route_peering/tasks/main.yaml @@ -220,5 +220,4 @@ - name: Initialize the setup - sleep for 180 seconds for DCNM to completely update the state wait_for: - timeout: 180 - + timeout: 180 \ No newline at end of file diff --git a/tests/integration/targets/prepare_dcnm_template/tasks/main.yaml b/tests/integration/targets/prepare_dcnm_template/tasks/main.yaml index 4b0d8d49a..c30132ac9 100644 --- a/tests/integration/targets/prepare_dcnm_template/tasks/main.yaml +++ b/tests/integration/targets/prepare_dcnm_template/tasks/main.yaml @@ -201,5 +201,4 @@ when: (my_idx == (result["diff"][0]["merged"] | length)) loop: '{{ result.response }}' loop_control: - index_var: my_idx - + index_var: my_idx \ No newline at end of file diff --git a/tests/sanity/ignore-2.10.txt b/tests/sanity/ignore-2.10.txt index 573064ba8..d86f3a0a9 100644 --- a/tests/sanity/ignore-2.10.txt +++ b/tests/sanity/ignore-2.10.txt @@ -8,3 +8,5 @@ plugins/modules/dcnm_service_node.py validate-modules:missing-gplv3-license # GP plugins/modules/dcnm_template.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module plugins/modules/dcnm_service_route_peering.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module 
plugins/modules/dcnm_service_policy.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module +plugins/modules/dcnm_rest.py import-2.6!skip +plugins/modules/dcnm_rest.py import-2.7!skip \ No newline at end of file diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt index 573064ba8..94d404ba2 100644 --- a/tests/sanity/ignore-2.11.txt +++ b/tests/sanity/ignore-2.11.txt @@ -8,3 +8,11 @@ plugins/modules/dcnm_service_node.py validate-modules:missing-gplv3-license # GP plugins/modules/dcnm_template.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module plugins/modules/dcnm_service_route_peering.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module plugins/modules/dcnm_service_policy.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module +plugins/modules/dcnm_rest.py import-2.6!skip +plugins/modules/dcnm_rest.py import-2.7!skip +plugins/httpapi/dcnm.py import-2.7!skip +plugins/httpapi/dcnm.py import-3.5!skip +plugins/httpapi/dcnm.py import-3.6!skip +plugins/httpapi/dcnm.py import-3.7!skip +plugins/httpapi/dcnm.py import-3.8!skip +plugins/httpapi/dcnm.py import-3.9!skip \ No newline at end of file diff --git a/tests/sanity/ignore-2.12.txt b/tests/sanity/ignore-2.12.txt new file mode 100644 index 000000000..f54169225 --- /dev/null +++ b/tests/sanity/ignore-2.12.txt @@ -0,0 +1,15 @@ +plugins/modules/dcnm_vrf.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module +plugins/modules/dcnm_network.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module +plugins/modules/dcnm_interface.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module +plugins/modules/dcnm_inventory.py 
validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module +plugins/modules/dcnm_policy.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module +plugins/modules/dcnm_rest.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module +plugins/modules/dcnm_service_node.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module +plugins/modules/dcnm_template.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module +plugins/modules/dcnm_service_route_peering.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module +plugins/modules/dcnm_service_policy.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module +plugins/modules/dcnm_rest.py import-2.6!skip +plugins/modules/dcnm_rest.py import-2.7!skip +plugins/httpapi/dcnm.py import-3.8!skip +plugins/httpapi/dcnm.py import-3.9!skip +plugins/httpapi/dcnm.py import-3.10!skip \ No newline at end of file diff --git a/tests/sanity/ignore-2.9.txt b/tests/sanity/ignore-2.9.txt index 573064ba8..d86f3a0a9 100644 --- a/tests/sanity/ignore-2.9.txt +++ b/tests/sanity/ignore-2.9.txt @@ -8,3 +8,5 @@ plugins/modules/dcnm_service_node.py validate-modules:missing-gplv3-license # GP plugins/modules/dcnm_template.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module plugins/modules/dcnm_service_route_peering.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module plugins/modules/dcnm_service_policy.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module +plugins/modules/dcnm_rest.py import-2.6!skip 
+plugins/modules/dcnm_rest.py import-2.7!skip \ No newline at end of file diff --git a/tests/unit/modules/dcnm/dcnm_module.py b/tests/unit/modules/dcnm/dcnm_module.py index 94f99c23a..ac2624477 100644 --- a/tests/unit/modules/dcnm/dcnm_module.py +++ b/tests/unit/modules/dcnm/dcnm_module.py @@ -1,6 +1,4 @@ -#!/usr/bin/python -# -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,25 +13,33 @@ # limitations under the License. # Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) +from __future__ import absolute_import, division, print_function + __metaclass__ = type import os import json -from ansible_collections.ansible.netcommon.tests.unit.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase -from ansible_collections.ansible.netcommon.tests.unit.modules.utils import set_module_args as _set_module_args +from ansible_collections.ansible.netcommon.tests.unit.modules.utils import ( + AnsibleExitJson, + AnsibleFailJson, + ModuleTestCase, +) +from ansible_collections.ansible.netcommon.tests.unit.modules.utils import ( + set_module_args as _set_module_args, +) def set_module_args(args): return _set_module_args(args) -fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures') +fixture_path = os.path.join(os.path.dirname(__file__), "fixtures") fixture_data = {} + def loadPlaybookData(module_name): - path = os.path.join(fixture_path, "{}.json".format(module_name)) + path = os.path.join(fixture_path, "{0}.json".format(module_name)) with open(path) as f: data = f.read() @@ -45,7 +51,8 @@ def loadPlaybookData(module_name): return j_data -def load_fixture(module_name, name, device=''): + +def load_fixture(module_name, name, device=""): path = os.path.join(fixture_path, module_name, device, name) if not 
os.path.exists(path): path = os.path.join(fixture_path, module_name, name) @@ -66,9 +73,10 @@ def load_fixture(module_name, name, device=''): class TestDcnmModule(ModuleTestCase): - - def execute_module_devices(self, failed=False, changed=False, response=None, sort=True, defaults=False): - module_name = self.module.__name__.rsplit('.', 1)[1] + def execute_module_devices( + self, failed=False, changed=False, response=None, sort=True, defaults=False + ): + module_name = self.module.__name__.rsplit(".", 1)[1] local_fixture_path = os.path.join(fixture_path, module_name) models = [] @@ -77,30 +85,36 @@ def execute_module_devices(self, failed=False, changed=False, response=None, sor if os.path.isdir(path): models.append(os.path.basename(path)) if not models: - models = [''] + models = [""] retvals = {} for model in models: - retvals[model] = self.execute_module(failed, changed, response, sort, device=model) + retvals[model] = self.execute_module( + failed, changed, response, sort, device=model + ) return retvals - def execute_module(self, failed=False, changed=False, response=None, sort=True, device=''): + def execute_module( + self, failed=False, changed=False, response=None, sort=True, device="" + ): self.load_fixtures(response, device=device) if failed: result = self.failed() - self.assertTrue(result['failed'], result) + self.assertTrue(result["failed"], result) else: result = self.changed(changed) - self.assertEqual(result['changed'], changed, result) + self.assertEqual(result["changed"], changed, result) if response is not None: if sort: - self.assertEqual(sorted(response), sorted(result['response']), result['response']) + self.assertEqual( + sorted(response), sorted(result["response"]), result["response"] + ) else: - self.assertEqual(response, result['response'], result['response']) + self.assertEqual(response, result["response"], result["response"]) return result @@ -109,7 +123,7 @@ def failed(self): self.module.main() result = exc.exception.args[0] - 
self.assertTrue(result['failed'], result) + self.assertTrue(result["failed"], result) return result def changed(self, changed=False): @@ -117,8 +131,8 @@ def changed(self, changed=False): self.module.main() result = exc.exception.args[0] - self.assertEqual(result['changed'], changed, result) + self.assertEqual(result["changed"], changed, result) return result - def load_fixtures(self, response=None, device=''): + def load_fixtures(self, response=None, device=""): pass diff --git a/tests/unit/modules/dcnm/test_dcnm_intf.py b/tests/unit/modules/dcnm/test_dcnm_intf.py index 925b217d9..2b4508dce 100644 --- a/tests/unit/modules/dcnm/test_dcnm_intf.py +++ b/tests/unit/modules/dcnm/test_dcnm_intf.py @@ -1,6 +1,4 @@ -#!/usr/bin/python -# -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,16 +13,20 @@ # limitations under the License. 
# Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) +from __future__ import absolute_import, division, print_function + __metaclass__ = type from ansible_collections.ansible.netcommon.tests.unit.compat.mock import patch + # from units.compat.mock import patch from ansible_collections.cisco.dcnm.plugins.modules import dcnm_interface from .dcnm_module import TestDcnmModule, set_module_args, loadPlaybookData -import json, copy +import json +import copy + class TestDcnmIntfModule(TestDcnmModule): @@ -35,31 +37,39 @@ class TestDcnmIntfModule(TestDcnmModule): def init_data(self): pass - def log_msg (self, msg): + def log_msg(self, msg): if fd is None: fd = open("intf-ut.log", "w") - self.fd.write (msg) + self.fd.write(msg) self.fd.flush() - def log_msg (self, msg): - self.fd.write (msg) + def log_msg(self, msg): + self.fd.write(msg) def setUp(self): super(TestDcnmIntfModule, self).setUp() - self.mock_dcnm_fabric_details = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_interface.get_fabric_inventory_details') + self.mock_dcnm_fabric_details = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_interface.get_fabric_inventory_details" + ) self.run_dcnm_fabric_details = self.mock_dcnm_fabric_details.start() - self.mock_dcnm_ip_sn = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_interface.get_ip_sn_dict') + self.mock_dcnm_ip_sn = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_interface.get_ip_sn_dict" + ) self.run_dcnm_ip_sn = self.mock_dcnm_ip_sn.start() - self.mock_dcnm_version_supported = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_interface.dcnm_version_supported') + self.mock_dcnm_version_supported = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_interface.dcnm_version_supported" + ) self.run_dcnm_version_supported = self.mock_dcnm_version_supported.start() - self.mock_dcnm_send = 
patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_interface.dcnm_send') - self.run_dcnm_send = self.mock_dcnm_send.start() + self.mock_dcnm_send = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_interface.dcnm_send" + ) + self.run_dcnm_send = self.mock_dcnm_send.start() def tearDown(self): @@ -69,792 +79,1214 @@ def tearDown(self): self.mock_dcnm_ip_sn.stop() self.mock_dcnm_fabric_details.stop() -#################################### GEN-FIXTURES ############################ + # -------------------------- GEN-FIXTURES -------------------------- - def load_multi_intf_fixtures (self): + def load_multi_intf_fixtures(self): - if ('_multi_intf_merged_new' in self._testMethodName): + if "_multi_intf_merged_new" in self._testMethodName: # No I/F exists case - playbook_pc_intf = [] - playbook_vpc_intf = [] - playbook_subint_intf = [] - playbook_lo_intf = [] - playbook_eth_intf = [] - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, self.playbook_mock_vpc_resp, - playbook_pc_intf, playbook_vpc_intf, - playbook_subint_intf, playbook_lo_intf, - playbook_eth_intf, - playbook_have_all_data, playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - playbook_deployed_data] - - def load_missing_intf_elems_fixtures (self): - - if ('_missing_intf_elems' in self._testMethodName): + playbook_pc_intf = [] + playbook_vpc_intf = [] + playbook_subint_intf = [] + playbook_lo_intf = [] + playbook_eth_intf = [] 
+ playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + self.playbook_mock_vpc_resp, + playbook_pc_intf, + playbook_vpc_intf, + playbook_subint_intf, + playbook_lo_intf, + playbook_eth_intf, + playbook_have_all_data, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + def load_missing_intf_elems_fixtures(self): + + if "_missing_intf_elems" in self._testMethodName: # No I/F exists case - playbook_pc_intf1 = [] - playbook_pc_intf2 = [] - playbook_vpc_intf = [] - playbook_eth_intf = [] - playbook_subint_intf = [] - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, self.playbook_mock_vpc_resp, - playbook_pc_intf1, playbook_pc_intf2, playbook_vpc_intf, - playbook_subint_intf, playbook_eth_intf, - playbook_have_all_data, playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - 
playbook_deployed_data] - - def load_mixed_intf_elems_fixtures (self): - - if ('_mixed_intf_merged_new' in self._testMethodName): + playbook_pc_intf1 = [] + playbook_pc_intf2 = [] + playbook_vpc_intf = [] + playbook_eth_intf = [] + playbook_subint_intf = [] + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + self.playbook_mock_vpc_resp, + playbook_pc_intf1, + playbook_pc_intf2, + playbook_vpc_intf, + playbook_subint_intf, + playbook_eth_intf, + playbook_have_all_data, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + def load_mixed_intf_elems_fixtures(self): + + if "_mixed_intf_merged_new" in self._testMethodName: # No I/F exists case - playbook_pc_intf = [] - playbook_eth_intf = [] - playbook_lo_intf = [] - playbook_subint_intf = [] - playbook_vpc_intf = [] - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, self.playbook_mock_vpc_resp, - playbook_pc_intf, - playbook_eth_intf, playbook_vpc_intf, - playbook_lo_intf, playbook_subint_intf, - playbook_have_all_data, playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - 
self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - playbook_deployed_data] - - def load_bunched_intf_elems_fixtures (self): - - if ('_bunched_intf_merged_new' in self._testMethodName): + playbook_pc_intf = [] + playbook_eth_intf = [] + playbook_lo_intf = [] + playbook_subint_intf = [] + playbook_vpc_intf = [] + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + self.playbook_mock_vpc_resp, + playbook_pc_intf, + playbook_eth_intf, + playbook_vpc_intf, + playbook_lo_intf, + playbook_subint_intf, + playbook_have_all_data, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + def load_bunched_intf_elems_fixtures(self): + + if "_bunched_intf_merged_new" in self._testMethodName: # No I/F exists case - playbook_pc_intf1 = [] - playbook_pc_intf2 = [] - playbook_pc_intf3 = [] - playbook_pc_intf4 = [] - playbook_eth_intf1 = [] - playbook_eth_intf2 = [] - playbook_eth_intf3 = [] - playbook_eth_intf4 = [] - playbook_vpc_intf1 = [] - playbook_vpc_intf2 = [] - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = 
[self.playbook_mock_vpc_resp, self.playbook_mock_vpc_resp, - playbook_pc_intf1, playbook_pc_intf2, - playbook_pc_intf3, playbook_pc_intf4, - playbook_eth_intf1, playbook_eth_intf2, - playbook_eth_intf3, playbook_eth_intf4, - playbook_vpc_intf1, playbook_vpc_intf2, - playbook_have_all_data, playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - playbook_deployed_data] - - - def load_missing_members_fixtures (self): - - if ('_missing_peer_members' in self._testMethodName): + playbook_pc_intf1 = [] + playbook_pc_intf2 = [] + playbook_pc_intf3 = [] + playbook_pc_intf4 = [] + playbook_eth_intf1 = [] + playbook_eth_intf2 = [] + playbook_eth_intf3 = [] + playbook_eth_intf4 = [] + playbook_vpc_intf1 = [] + playbook_vpc_intf2 = [] + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + self.playbook_mock_vpc_resp, + playbook_pc_intf1, + playbook_pc_intf2, + playbook_pc_intf3, + playbook_pc_intf4, + playbook_eth_intf1, + playbook_eth_intf2, + playbook_eth_intf3, + playbook_eth_intf4, + playbook_vpc_intf1, + playbook_vpc_intf2, + playbook_have_all_data, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + 
self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + def load_missing_members_fixtures(self): + + if "_missing_peer_members" in self._testMethodName: # No I/F exists case - playbook_intf = [] - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, self.playbook_mock_vpc_resp, - playbook_intf, - playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - playbook_deployed_data] - - def load_type_missing_fixtures (self): - - if ('_type_missing_merged_new' in self._testMethodName): + playbook_intf = [] + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + self.playbook_mock_vpc_resp, + playbook_intf, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + def load_type_missing_fixtures(self): + + 
if "_type_missing_merged_new" in self._testMethodName: # No I/F exists case - playbook_pc_intf = [] - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_pc_intf, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp] - - def load_missing_state_fixtures (self): - - if ('_missing_state' in self._testMethodName): + playbook_pc_intf = [] + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_pc_intf, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] + + def load_missing_state_fixtures(self): + + if "_missing_state" in self._testMethodName: # No I/F exists case - playbook_pc_intf = [] - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_pc_intf, - playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - 
self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp] - - def load_query_state_fixtures (self): - - if ('_query' in self._testMethodName): - playbook_all_intf = self.payloads_data.get('all_payload') - playbook_pc_intf = self.payloads_data.get('pc_payload') - playbook_lo_intf = self.payloads_data.get('lo_payload') - playbook_eth_intf = self.payloads_data.get('eth_payload') - playbook_sub_intf = self.payloads_data.get('subint_payload') - playbook_vpc_intf = self.payloads_data.get('vpc_payload') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - self.playbook_mock_vpc_resp, - playbook_all_intf, playbook_pc_intf, - playbook_lo_intf, playbook_eth_intf, - playbook_sub_intf, playbook_vpc_intf, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp] - -#################################### PC-FIXTURES ############################ - - def load_pc_fixtures (self): - - if ('_pc_merged_new' in self._testMethodName): + playbook_pc_intf = [] + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_pc_intf, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + 
self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] + + def load_query_state_fixtures(self): + + if "_query" in self._testMethodName: + playbook_all_intf = self.payloads_data.get("all_payload") + playbook_pc_intf = self.payloads_data.get("pc_payload") + playbook_lo_intf = self.payloads_data.get("lo_payload") + playbook_eth_intf = self.payloads_data.get("eth_payload") + playbook_sub_intf = self.payloads_data.get("subint_payload") + playbook_vpc_intf = self.payloads_data.get("vpc_payload") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + self.playbook_mock_vpc_resp, + playbook_all_intf, + playbook_pc_intf, + playbook_lo_intf, + playbook_eth_intf, + playbook_sub_intf, + playbook_vpc_intf, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] + + # -------------------------- PC-FIXTURES -------------------------- + + def load_pc_fixtures(self): + + if "_pc_merged_new" in self._testMethodName: # No I/F exists case - playbook_pc_intf1 = [] - playbook_pc_intf2 = [] - playbook_pc_intf3 = [] - playbook_pc_intf4 = [] - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_pc_intf1, playbook_pc_intf2, - playbook_pc_intf3, playbook_pc_intf4, - playbook_have_all_data, playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - 
self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp] - if ('_pc_merged_policy_change' in self._testMethodName): - playbook_pc_intf1 = self.payloads_data.get('pc_merged_trunk_payloads') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_pc_intf1, - playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp] - - - if ('_pc_merged_idempotent' in self._testMethodName): - playbook_pc_intf1 = self.payloads_data.get('pc_merged_trunk_payloads') - playbook_pc_intf2 = self.payloads_data.get('pc_merged_access_payloads') - playbook_pc_intf3 = self.payloads_data.get('pc_merged_l3_payloads') - playbook_pc_intf4 = self.payloads_data.get('pc_merged_monitor_payloads') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_pc_intf1, playbook_pc_intf2, - playbook_pc_intf3, playbook_pc_intf4, - playbook_have_all_data, playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - 
self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp] + playbook_pc_intf1 = [] + playbook_pc_intf2 = [] + playbook_pc_intf3 = [] + playbook_pc_intf4 = [] + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_pc_intf1, + playbook_pc_intf2, + playbook_pc_intf3, + playbook_pc_intf4, + playbook_have_all_data, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] + if "_pc_merged_policy_change" in self._testMethodName: + playbook_pc_intf1 = self.payloads_data.get("pc_merged_trunk_payloads") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_pc_intf1, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] + + if "_pc_merged_idempotent" in self._testMethodName: + playbook_pc_intf1 = self.payloads_data.get("pc_merged_trunk_payloads") + playbook_pc_intf2 = self.payloads_data.get("pc_merged_access_payloads") + playbook_pc_intf3 = self.payloads_data.get("pc_merged_l3_payloads") + playbook_pc_intf4 = self.payloads_data.get("pc_merged_monitor_payloads") + playbook_have_all_data 
= self.have_all_payloads_data.get("payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_pc_intf1, + playbook_pc_intf2, + playbook_pc_intf3, + playbook_pc_intf4, + playbook_have_all_data, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] # Use the same payloads that we use for creating new. - if ('_pc_deleted_existing' in self._testMethodName): - playbook_pc_intf1 = self.payloads_data.get('pc_merged_trunk_payloads') - playbook_pc_intf2 = self.payloads_data.get('pc_merged_access_payloads') - playbook_pc_intf3 = self.payloads_data.get('pc_merged_l3_payloads') - playbook_pc_intf4 = self.payloads_data.get('pc_merged_monitor_payloads') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_pc_intf1, playbook_pc_intf2, - playbook_pc_intf3, playbook_pc_intf4, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp] - - if ('_pc_replaced_existing' in self._testMethodName): - playbook_pc_intf1 = self.payloads_data.get('pc_merged_trunk_payloads') - playbook_pc_intf2 = self.payloads_data.get('pc_merged_access_payloads') - playbook_pc_intf3 = self.payloads_data.get('pc_merged_l3_payloads') - playbook_pc_intf4 = self.payloads_data.get('pc_merged_monitor_payloads') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - - 
self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_pc_intf1, playbook_pc_intf2, - playbook_pc_intf3, playbook_pc_intf4, - playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp] - - if ('_pc_overridden_existing' in self._testMethodName): - - playbook_pc_intf1 = self.payloads_data.get('pc_merged_trunk_payloads') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_pc_intf1, playbook_have_all_data, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - ] - -#################################### ETH-FIXTURES ############################ - - def load_eth_fixtures (self): - - if ('_eth_merged_new' in self._testMethodName): + if "_pc_deleted_existing" in self._testMethodName: + playbook_pc_intf1 = self.payloads_data.get("pc_merged_trunk_payloads") + playbook_pc_intf2 = self.payloads_data.get("pc_merged_access_payloads") + playbook_pc_intf3 = self.payloads_data.get("pc_merged_l3_payloads") + playbook_pc_intf4 = self.payloads_data.get("pc_merged_monitor_payloads") + playbook_have_all_data = 
self.have_all_payloads_data.get("payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_pc_intf1, + playbook_pc_intf2, + playbook_pc_intf3, + playbook_pc_intf4, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] + + if "_pc_replaced_existing" in self._testMethodName: + playbook_pc_intf1 = self.payloads_data.get("pc_merged_trunk_payloads") + playbook_pc_intf2 = self.payloads_data.get("pc_merged_access_payloads") + playbook_pc_intf3 = self.payloads_data.get("pc_merged_l3_payloads") + playbook_pc_intf4 = self.payloads_data.get("pc_merged_monitor_payloads") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_pc_intf1, + playbook_pc_intf2, + playbook_pc_intf3, + playbook_pc_intf4, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] + + if "_pc_overridden_existing" in self._testMethodName: + + playbook_pc_intf1 = self.payloads_data.get("pc_merged_trunk_payloads") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_pc_intf1, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + 
self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] + + # -------------------------- ETH-FIXTURES -------------------------- + + def load_eth_fixtures(self): + + if "_eth_merged_new" in self._testMethodName: # No I/F exists case - playbook_eth_intf1 = [] - playbook_eth_intf2 = [] - playbook_eth_intf3 = [] - playbook_eth_intf4 = [] - playbook_eth_intf5 = [] - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_eth_intf1, playbook_eth_intf2, - playbook_eth_intf3, playbook_eth_intf4, - playbook_eth_intf5, - playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - playbook_deployed_data] - - - if ('_eth_merged_existing' in self._testMethodName): + playbook_eth_intf1 = [] + playbook_eth_intf2 = [] + playbook_eth_intf3 = [] + playbook_eth_intf4 = [] + playbook_eth_intf5 = [] + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_eth_intf1, + playbook_eth_intf2, + playbook_eth_intf3, + 
playbook_eth_intf4, + playbook_eth_intf5, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + if "_eth_merged_existing" in self._testMethodName: # No I/F exists case - playbook_eth_intf1 = self.payloads_data.get('eth_merged_routed_payloads_eth_1_2') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_eth_intf1, playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp] - - if ('_eth_merged_idempotent' in self._testMethodName): - - playbook_eth_intf1 = self.payloads_data.get('eth_merged_trunk_payloads') - playbook_eth_intf2 = self.payloads_data.get('eth_merged_access_payloads') - playbook_eth_intf3 = self.payloads_data.get('eth_merged_routed_payloads') - playbook_eth_intf4 = self.payloads_data.get('eth_merged_epl_routed_payloads') - playbook_eth_intf5 = self.payloads_data.get('eth_merged_monitor_payloads') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_eth_intf1, playbook_eth_intf2, - playbook_eth_intf3, playbook_eth_intf4, - 
playbook_eth_intf5, - playbook_have_all_data, playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp] - - if ('_eth_replaced_existing' in self._testMethodName): - - playbook_eth_intf1 = self.payloads_data.get('eth_merged_trunk_payloads') - playbook_eth_intf2 = self.payloads_data.get('eth_merged_access_payloads') - playbook_eth_intf3 = self.payloads_data.get('eth_merged_routed_payloads') - playbook_eth_intf4 = self.payloads_data.get('eth_merged_epl_routed_payloads') - playbook_eth_intf5 = self.payloads_data.get('eth_merged_monitor_payloads') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_eth_intf1, playbook_eth_intf2, - playbook_eth_intf3, playbook_eth_intf4, - playbook_eth_intf5, - playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - playbook_deployed_data] - - if ('_eth_deleted_existing' in self._testMethodName): - - playbook_eth_intf1 = self.payloads_data.get('eth_merged_trunk_payloads') - playbook_eth_intf2 = self.payloads_data.get('eth_merged_access_payloads') - playbook_eth_intf3 = 
self.payloads_data.get('eth_merged_routed_payloads') - playbook_eth_intf4 = self.payloads_data.get('eth_merged_epl_routed_payloads') - playbook_eth_intf5 = self.payloads_data.get('eth_merged_monitor_payloads') - playbook_have_all_data = self.have_all_payloads_data.get('eth_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_have_all_data, - playbook_eth_intf1, playbook_eth_intf2, - playbook_eth_intf3, playbook_eth_intf4, - playbook_eth_intf5, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp] - - if ('_eth_overridden_existing' in self._testMethodName): - - playbook_eth_intf1 = self.payloads_data.get('eth_merged_trunk_payloads') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_eth_intf1, playbook_have_all_data, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - playbook_deployed_data - ] - -#################################### SUBINT-FIXTURES ############################ - - def load_subint_fixtures 
(self): - - if ('_subint_merged_new' in self._testMethodName): + playbook_eth_intf1 = self.payloads_data.get( + "eth_merged_routed_payloads_eth_1_2" + ) + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_eth_intf1, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] + + if "_eth_merged_idempotent" in self._testMethodName: + + playbook_eth_intf1 = self.payloads_data.get("eth_merged_trunk_payloads") + playbook_eth_intf2 = self.payloads_data.get("eth_merged_access_payloads") + playbook_eth_intf3 = self.payloads_data.get("eth_merged_routed_payloads") + playbook_eth_intf4 = self.payloads_data.get( + "eth_merged_epl_routed_payloads" + ) + playbook_eth_intf5 = self.payloads_data.get("eth_merged_monitor_payloads") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_eth_intf1, + playbook_eth_intf2, + playbook_eth_intf3, + playbook_eth_intf4, + playbook_eth_intf5, + playbook_have_all_data, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] + 
+ if "_eth_replaced_existing" in self._testMethodName: + + playbook_eth_intf1 = self.payloads_data.get("eth_merged_trunk_payloads") + playbook_eth_intf2 = self.payloads_data.get("eth_merged_access_payloads") + playbook_eth_intf3 = self.payloads_data.get("eth_merged_routed_payloads") + playbook_eth_intf4 = self.payloads_data.get( + "eth_merged_epl_routed_payloads" + ) + playbook_eth_intf5 = self.payloads_data.get("eth_merged_monitor_payloads") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_eth_intf1, + playbook_eth_intf2, + playbook_eth_intf3, + playbook_eth_intf4, + playbook_eth_intf5, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + if "_eth_deleted_existing" in self._testMethodName: + + playbook_eth_intf1 = self.payloads_data.get("eth_merged_trunk_payloads") + playbook_eth_intf2 = self.payloads_data.get("eth_merged_access_payloads") + playbook_eth_intf3 = self.payloads_data.get("eth_merged_routed_payloads") + playbook_eth_intf4 = self.payloads_data.get( + "eth_merged_epl_routed_payloads" + ) + playbook_eth_intf5 = self.payloads_data.get("eth_merged_monitor_payloads") + playbook_have_all_data = self.have_all_payloads_data.get("eth_payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_have_all_data, + playbook_eth_intf1, + playbook_eth_intf2, + playbook_eth_intf3, + playbook_eth_intf4, + 
playbook_eth_intf5, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] + + if "_eth_overridden_existing" in self._testMethodName: + + playbook_eth_intf1 = self.payloads_data.get("eth_merged_trunk_payloads") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_eth_intf1, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + # -------------------------- SUBINT-FIXTURES -------------------------- + + def load_subint_fixtures(self): + + if "_subint_merged_new" in self._testMethodName: # No I/F exists case - playbook_sub_intf1 = [] - playbook_sub_intf2 = [] - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_sub_intf1, playbook_sub_intf2, - playbook_have_all_data, - 
self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, playbook_deployed_data] - - if ('_subint_merged_idempotent' in self._testMethodName): - - playbook_subint_intf1 = self.payloads_data.get('subint_merged_payloads_1') - playbook_subint_intf2 = self.payloads_data.get('subint_merged_payloads_2') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_subint_intf1, playbook_subint_intf2, - playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp] - - if ('_subint_replaced_existing' in self._testMethodName): - - playbook_subint_intf1 = self.payloads_data.get('subint_merged_payloads_1') - playbook_subint_intf2 = self.payloads_data.get('subint_merged_payloads_2') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_subint_intf1, playbook_subint_intf2, - playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - playbook_deployed_data] - - if ('_subint_replaced_non_existing' in self._testMethodName): - - playbook_subint_intf1 = self.payloads_data.get('subint_merged_payloads_1') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - 
- self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_subint_intf1, - playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - playbook_deployed_data] - - if ('_subint_deleted_existing' in self._testMethodName): - - playbook_subint_intf1 = self.payloads_data.get('subint_merged_payloads_1') - playbook_subint_intf2 = self.payloads_data.get('subint_merged_payloads_2') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_subint_intf1, playbook_subint_intf2, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp] - - if ('_subint_deleted_non_existing' in self._testMethodName): - - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - [], - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp] - - if ('_subint_overridden_existing' in self._testMethodName): - - playbook_subint_intf1 = self.payloads_data.get('subint_merged_payloads_1') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - 
self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_subint_intf1, playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - playbook_deployed_data] - -#################################### LOOPBACK-FIXTURES ############################ - - def load_lo_fixtures (self): - - if ('_lo_merged_new' in self._testMethodName): + playbook_sub_intf1 = [] + playbook_sub_intf2 = [] + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_sub_intf1, + playbook_sub_intf2, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + if "_subint_merged_idempotent" in self._testMethodName: + + playbook_subint_intf1 = self.payloads_data.get("subint_merged_payloads_1") + playbook_subint_intf2 = self.payloads_data.get("subint_merged_payloads_2") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_subint_intf1, + playbook_subint_intf2, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] + + if 
"_subint_replaced_existing" in self._testMethodName: + + playbook_subint_intf1 = self.payloads_data.get("subint_merged_payloads_1") + playbook_subint_intf2 = self.payloads_data.get("subint_merged_payloads_2") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_subint_intf1, + playbook_subint_intf2, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + if "_subint_replaced_non_existing" in self._testMethodName: + + playbook_subint_intf1 = self.payloads_data.get("subint_merged_payloads_1") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_subint_intf1, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + if "_subint_deleted_existing" in self._testMethodName: + + playbook_subint_intf1 = self.payloads_data.get("subint_merged_payloads_1") + playbook_subint_intf2 = self.payloads_data.get("subint_merged_payloads_2") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + 
playbook_subint_intf1, + playbook_subint_intf2, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] + + if "_subint_deleted_non_existing" in self._testMethodName: + + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + [], + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] + + if "_subint_overridden_existing" in self._testMethodName: + + playbook_subint_intf1 = self.payloads_data.get("subint_merged_payloads_1") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_subint_intf1, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + # -------------------------- LOOPBACK-FIXTURES -------------------------- + + def load_lo_fixtures(self): + + if "_lo_merged_new" in self._testMethodName: # No I/F exists case - 
playbook_lo_intf1 = [] - playbook_lo_intf2 = [] - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_lo_intf1, playbook_lo_intf2, - playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - playbook_deployed_data] - - if ('_lo_merged_idempotent' in self._testMethodName): - - playbook_lo_intf1 = self.payloads_data.get('lo_merged_payloads_1') - playbook_lo_intf2 = self.payloads_data.get('lo_merged_payloads_2') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_lo_intf1, playbook_lo_intf2, - playbook_have_all_data, playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp] - - if ('_lo_merged_existing' in self._testMethodName): - - playbook_lo_intf1 = self.payloads_data.get('lo_merged_payloads_1') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_lo_intf1, - playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, 
self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - playbook_deployed_data] - - - if ('_lo_replaced_existing' in self._testMethodName): - - playbook_lo_intf1 = self.payloads_data.get('lo_merged_payloads_1') - playbook_lo_intf2 = self.payloads_data.get('lo_merged_payloads_2') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_lo_intf1, playbook_lo_intf2, - playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - playbook_deployed_data] - - if ('_lo_deleted_existing' in self._testMethodName): - - playbook_lo_intf1 = self.payloads_data.get('lo_merged_payloads_1') - playbook_lo_intf2 = self.payloads_data.get('lo_merged_payloads_2') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_lo_intf1, playbook_lo_intf2, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp] + playbook_lo_intf1 = [] + playbook_lo_intf2 = [] + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_lo_intf1, + 
playbook_lo_intf2, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + if "_lo_merged_idempotent" in self._testMethodName: + + playbook_lo_intf1 = self.payloads_data.get("lo_merged_payloads_1") + playbook_lo_intf2 = self.payloads_data.get("lo_merged_payloads_2") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_lo_intf1, + playbook_lo_intf2, + playbook_have_all_data, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] + + if "_lo_merged_existing" in self._testMethodName: + + playbook_lo_intf1 = self.payloads_data.get("lo_merged_payloads_1") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_lo_intf1, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + if "_lo_replaced_existing" in self._testMethodName: + + 
playbook_lo_intf1 = self.payloads_data.get("lo_merged_payloads_1") + playbook_lo_intf2 = self.payloads_data.get("lo_merged_payloads_2") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_lo_intf1, + playbook_lo_intf2, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + if "_lo_deleted_existing" in self._testMethodName: + + playbook_lo_intf1 = self.payloads_data.get("lo_merged_payloads_1") + playbook_lo_intf2 = self.payloads_data.get("lo_merged_payloads_2") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_lo_intf1, + playbook_lo_intf2, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] # We are overriding 2 interfaces here which is different from other cases. 
So we need # side-effects for both - if ('_lo_overridden_existing' in self._testMethodName): - - playbook_lo_intf1 = self.payloads_data.get('lo_merged_payloads_1') - playbook_lo_intf2 = self.payloads_data.get('lo_merged_payloads_2') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_lo_intf1, playbook_lo_intf2, playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - playbook_deployed_data] - - if ('_lo_overridden_non_existing' in self._testMethodName): - - playbook_lo_intf1 = self.payloads_data.get('lo_merged_payloads_1') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_lo_intf1, playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - playbook_deployed_data] - - if ('_lo_overridden_existing_2' in self._testMethodName): - - playbook_lo_intf1 = self.payloads_data.get('lo_merged_payloads_3') - playbook_have_all_data = 
self.have_all_payloads_data.get('payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, - playbook_lo_intf1, playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp] - -#################################### vPC-FIXTURES ############################ - - def load_vpc_fixtures (self): - - if ('_vpc_merged_new' in self._testMethodName): + if "_lo_overridden_existing" in self._testMethodName: + + playbook_lo_intf1 = self.payloads_data.get("lo_merged_payloads_1") + playbook_lo_intf2 = self.payloads_data.get("lo_merged_payloads_2") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_lo_intf1, + playbook_lo_intf2, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + if "_lo_overridden_non_existing" in self._testMethodName: + + playbook_lo_intf1 = self.payloads_data.get("lo_merged_payloads_1") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + 
"deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_lo_intf1, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + if "_lo_overridden_existing_2" in self._testMethodName: + + playbook_lo_intf1 = self.payloads_data.get("lo_merged_payloads_3") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + playbook_lo_intf1, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] + + # -------------------------- vPC-FIXTURES -------------------------- + + def load_vpc_fixtures(self): + + if "_vpc_merged_new" in self._testMethodName: # No I/F exists case - playbook_vpc_intf1 = [] - playbook_vpc_intf2 = [] - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, self.playbook_mock_vpc_resp, - playbook_vpc_intf1, playbook_vpc_intf2, - playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - 
self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - playbook_deployed_data] - - if ('_vpc_merged_idempotent' in self._testMethodName): - playbook_vpc_intf1 = self.payloads_data.get('vpc_merged_trunk_payloads') - playbook_vpc_intf2 = self.payloads_data.get('vpc_merged_access_payloads') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, self.playbook_mock_vpc_resp, - playbook_vpc_intf1, playbook_vpc_intf2, - playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp] + playbook_vpc_intf1 = [] + playbook_vpc_intf2 = [] + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + self.playbook_mock_vpc_resp, + playbook_vpc_intf1, + playbook_vpc_intf2, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + 
self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + if "_vpc_merged_idempotent" in self._testMethodName: + playbook_vpc_intf1 = self.payloads_data.get("vpc_merged_trunk_payloads") + playbook_vpc_intf2 = self.payloads_data.get("vpc_merged_access_payloads") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + self.playbook_mock_vpc_resp, + playbook_vpc_intf1, + playbook_vpc_intf2, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] # Use the same payloads that we use for creating new. - if ('_vpc_deleted_existing' in self._testMethodName): - playbook_vpc_intf1 = self.payloads_data.get('vpc_merged_trunk_payloads') - playbook_vpc_intf2 = self.payloads_data.get('vpc_merged_access_payloads') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, self.playbook_mock_vpc_resp, - playbook_vpc_intf1, playbook_vpc_intf2, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp] - - if ('_vpc_replaced_existing' in self._testMethodName): - playbook_vpc_intf1 = self.payloads_data.get('vpc_merged_trunk_payloads') - playbook_vpc_intf2 = self.payloads_data.get('vpc_merged_access_payloads') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = 
self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, self.playbook_mock_vpc_resp, - playbook_vpc_intf1, playbook_vpc_intf2, - playbook_have_all_data, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - playbook_deployed_data] - - if ('_vpc_overridden_existing' in self._testMethodName): - - playbook_vpc_intf1 = self.payloads_data.get('vpc_merged_trunk_payloads') - playbook_have_all_data = self.have_all_payloads_data.get('payloads') - playbook_deployed_data = self.have_all_payloads_data.get('deployed_payloads') - - self.run_dcnm_send.side_effect = [self.playbook_mock_vpc_resp, self.playbook_mock_vpc_resp, - playbook_vpc_intf1, - playbook_have_all_data, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - self.playbook_mock_succ_resp, self.playbook_mock_succ_resp, - playbook_deployed_data, - ] - -#################################### END-FIXTURES ############################ - - def load_fixtures(self, response=None, device=''): + if "_vpc_deleted_existing" in self._testMethodName: + playbook_vpc_intf1 = self.payloads_data.get("vpc_merged_trunk_payloads") + playbook_vpc_intf2 = self.payloads_data.get("vpc_merged_access_payloads") + 
playbook_have_all_data = self.have_all_payloads_data.get("payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + self.playbook_mock_vpc_resp, + playbook_vpc_intf1, + playbook_vpc_intf2, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + ] + + if "_vpc_replaced_existing" in self._testMethodName: + playbook_vpc_intf1 = self.payloads_data.get("vpc_merged_trunk_payloads") + playbook_vpc_intf2 = self.payloads_data.get("vpc_merged_access_payloads") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + self.playbook_mock_vpc_resp, + playbook_vpc_intf1, + playbook_vpc_intf2, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + if "_vpc_overridden_existing" in self._testMethodName: + + playbook_vpc_intf1 = self.payloads_data.get("vpc_merged_trunk_payloads") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get( + "deployed_payloads" + ) + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + self.playbook_mock_vpc_resp, + playbook_vpc_intf1, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + 
self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data, + ] + + # -------------------------- END-FIXTURES -------------------------- + + def load_fixtures(self, response=None, device=""): # setup the side effects self.run_dcnm_fabric_details.side_effect = [self.mock_fab_inv] @@ -862,1181 +1294,1399 @@ def load_fixtures(self, response=None, device=''): self.run_dcnm_version_supported.side_effect = [11] # Load port channel related side-effects - self.load_pc_fixtures () + self.load_pc_fixtures() # Load ethernet related side-effects - self.load_eth_fixtures () + self.load_eth_fixtures() # Load subint related side-effects - self.load_subint_fixtures () + self.load_subint_fixtures() # Load loopback related side-effects - self.load_lo_fixtures () + self.load_lo_fixtures() # Load vPC related side-effects - self.load_vpc_fixtures () + self.load_vpc_fixtures() # Load Multiple interafces related side-effects - self.load_multi_intf_fixtures () + self.load_multi_intf_fixtures() # Load Missing interface elements related side-effects - self.load_missing_intf_elems_fixtures () + self.load_missing_intf_elems_fixtures() # Load mixed interface configuration related side-effects - self.load_mixed_intf_elems_fixtures () + self.load_mixed_intf_elems_fixtures() # Load bunched interface configuration related side-effects - self.load_bunched_intf_elems_fixtures () + self.load_bunched_intf_elems_fixtures() # Load missing elements interface configuration related side-effects - self.load_type_missing_fixtures () - self.load_missing_state_fixtures () - 
self.load_missing_members_fixtures () - self.load_query_state_fixtures () + self.load_type_missing_fixtures() + self.load_missing_state_fixtures() + self.load_missing_members_fixtures() + self.load_query_state_fixtures() -####################################GEN-INTF ############################ + # -------------------------- GEN-INTF -------------------------- def test_dcnm_intf_multi_intf_merged_new(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_multi_intf_configs') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') - self.payloads_data = [] + self.config_data = loadPlaybookData("dcnm_intf_multi_intf_configs") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") + self.payloads_data = [] # load required config data - self.playbook_config = self.config_data.get('multi_intf_merged_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - - set_module_args(dict(state='merged', - fabric='test_fabric', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("multi_intf_merged_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 5) - for d in result['diff'][0]['merged']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'] in ['Port-channel300', - 'vPC301', - 'Ethernet1/1.1', - 'Ethernet1/10', - 'Loopback303']), 
True) + self.assertEqual(len(result["diff"][0]["merged"]), 5) + for d in result["diff"][0]["merged"]: + for intf in d["interfaces"]: + self.assertEqual( + ( + intf["ifName"] + in [ + "Port-channel300", + "vPC301", + "Ethernet1/1.1", + "Ethernet1/10", + "Loopback303", + ] + ), + True, + ) def test_dcnm_intf_missing_intf_elems_merged_new(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_multi_intf_configs') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_multi_intf_configs") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") self.payloads_data = [] # load required config data - self.playbook_config = self.config_data.get('missing_intf_elems_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='merged', - fabric='test_fabric', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("missing_intf_elems_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 5) - for d in result['diff'][0]['merged']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'] in ['Port-channel301', - 'Port-channel302', 
- 'Ethernet1/25.1', - 'Ethernet1/32', - 'vPC751']), True) + self.assertEqual(len(result["diff"][0]["merged"]), 5) + for d in result["diff"][0]["merged"]: + for intf in d["interfaces"]: + self.assertEqual( + ( + intf["ifName"] + in [ + "Port-channel301", + "Port-channel302", + "Ethernet1/25.1", + "Ethernet1/32", + "vPC751", + ] + ), + True, + ) def test_dcnm_intf_check_multi_intf_merged_new(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_multi_intf_configs') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') - self.payloads_data = [] + self.config_data = loadPlaybookData("dcnm_intf_multi_intf_configs") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") + self.payloads_data = [] # load required config data - self.playbook_config = self.config_data.get('multi_intf_merged_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - - set_module_args(dict(state='merged', - _ansible_check_mode=True, - fabric='test_fabric', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("multi_intf_merged_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict( + state="merged", + _ansible_check_mode=True, + fabric="test_fabric", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 5) - self.assertFalse(result.get('response')) - for d in result['diff'][0]['merged']: - for intf in d['interfaces']: - self.assertEqual 
((intf['ifName'] in ['Port-channel300', - 'vPC301', - 'Ethernet1/1.1', - 'Ethernet1/10', - 'Loopback303']), True) - -#################################### PC ############################ + self.assertEqual(len(result["diff"][0]["merged"]), 5) + self.assertFalse(result.get("response")) + for d in result["diff"][0]["merged"]: + for intf in d["interfaces"]: + self.assertEqual( + ( + intf["ifName"] + in [ + "Port-channel300", + "vPC301", + "Ethernet1/1.1", + "Ethernet1/10", + "Loopback303", + ] + ), + True, + ) + + # -------------------------- PC -------------------------- def test_dcnm_intf_pc_merged_new(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_pc_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_pc_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_pc_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_pc_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('pc_merged_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("pc_merged_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - 
self.assertEqual(len(result['diff'][0]['merged']), 4) - for d in result['diff'][0]['merged']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'] in ['Port-channel300', 'Port-channel301', - 'Port-channel302', 'Port-channel303']), True) + self.assertEqual(len(result["diff"][0]["merged"]), 4) + for d in result["diff"][0]["merged"]: + for intf in d["interfaces"]: + self.assertEqual( + ( + intf["ifName"] + in [ + "Port-channel300", + "Port-channel301", + "Port-channel302", + "Port-channel303", + ] + ), + True, + ) def test_dcnm_intf_pc_merged_idempotent(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_pc_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_pc_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_pc_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_pc_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('pc_merged_config') + self.playbook_config = self.config_data.get("pc_merged_config") for cfg in self.playbook_config: cfg["deploy"] = "False" - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, 
failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) def test_dcnm_intf_pc_merged_policy_change(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_pc_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_pc_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_pc_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_pc_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config_data = self.config_data.get('pc_merged_config_policy_change') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='merged', - fabric='test_fabric', - config=self.playbook_config_data)) + self.playbook_config_data = self.config_data.get( + "pc_merged_config_policy_change" + ) + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config_data) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 1) + self.assertEqual(len(result["diff"][0]["merged"]), 1) def test_dcnm_intf_pc_deleted_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_pc_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_pc_payloads') - self.have_all_payloads_data = 
loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_pc_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_pc_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('pc_deleted_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='deleted', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("pc_deleted_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="deleted", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['deleted']), 4) - for intf in result['diff'][0]['deleted']: - self.assertEqual ((intf['ifName'] in ['Port-channel300', 'Port-channel301', - 'Port-channel302', 'Port-channel303']), True) + self.assertEqual(len(result["diff"][0]["deleted"]), 4) + for intf in result["diff"][0]["deleted"]: + self.assertEqual( + ( + intf["ifName"] + in [ + "Port-channel300", + "Port-channel301", + "Port-channel302", + "Port-channel303", + ] + ), + True, + ) def test_dcnm_intf_pc_replaced_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_pc_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_pc_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = 
loadPlaybookData("dcnm_intf_pc_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_pc_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('pc_replaced_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='replaced', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("pc_replaced_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="replaced", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['replaced']), 3) - - changed_objs = ['MEMBER_INTERFACES', 'PC_MODE', 'BPDUGUARD_ENABLED', - 'PORTTYPE_FAST_ENABLED', 'MTU', 'ALLOWED_VLANS', - 'DESC', 'ADMIN_STATE', 'INTF_VRF', 'IP', 'PREFIX', - 'ROUTING_TAG', 'SPEED'] - - for d in result['diff'][0]['replaced']: - for intf in d['interfaces']: - if_keys = list(intf['nvPairs'].keys()) - self.assertEqual ((set(if_keys).issubset(set(changed_objs))), True) + self.assertEqual(len(result["diff"][0]["replaced"]), 3) + + changed_objs = [ + "MEMBER_INTERFACES", + "PC_MODE", + "BPDUGUARD_ENABLED", + "PORTTYPE_FAST_ENABLED", + "MTU", + "ALLOWED_VLANS", + "DESC", + "ADMIN_STATE", + "INTF_VRF", + "IP", + "PREFIX", + "ROUTING_TAG", + "SPEED", + ] + + for d in result["diff"][0]["replaced"]: + for intf in d["interfaces"]: + if_keys = list(intf["nvPairs"].keys()) + 
self.assertEqual((set(if_keys).issubset(set(changed_objs))), True) # Monitor port wil not be deployes - self.assertEqual(len(result['diff'][0]['deploy']), 3) + self.assertEqual(len(result["diff"][0]["deploy"]), 3) def test_dcnm_intf_pc_overridden_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_pc_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_pc_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_pc_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_pc_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('pc_overridden_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='overridden', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("pc_overridden_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="overridden", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['deleted']), 6) + self.assertEqual(len(result["diff"][0]["deleted"]), 6) - del_if_names = ['port-channel301', 'port-channel302', 'port-channel303', - 'ethernet1/3.2', 'loopback200', 'vpc300'] + del_if_names = [ + "port-channel301", + "port-channel302", + "port-channel303", + "ethernet1/3.2", + 
"loopback200", + "vpc300", + ] - rep_if_names = ['ethernet1/3.2', 'ethernet1/1', 'ethernet1/2', 'ethernet3/2'] - ovr_if_names = ['port-channel300'] + rep_if_names = ["ethernet1/3.2", "ethernet1/1", "ethernet1/2", "ethernet3/2"] + ovr_if_names = ["port-channel300"] - for intf in result['diff'][0]['deleted']: - self.assertEqual ((intf['ifName'].lower() in del_if_names), True) + for intf in result["diff"][0]["deleted"]: + self.assertEqual((intf["ifName"].lower() in del_if_names), True) - for d in result['diff'][0]['replaced']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'].lower() in rep_if_names), True) + for d in result["diff"][0]["replaced"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"].lower() in rep_if_names), True) - for d in result['diff'][0]['overridden']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'].lower() in ovr_if_names), True) + for d in result["diff"][0]["overridden"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"].lower() in ovr_if_names), True) -#################################### ETH ############################ + # -------------------------- ETH -------------------------- def test_dcnm_intf_eth_merged_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_eth_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_eth_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_eth_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_eth_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('eth_merged_config_existing') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - 
self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("eth_merged_config_existing") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 1) - for d in result['diff'][0]['merged']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'] in ['Ethernet1/2']), True) + self.assertEqual(len(result["diff"][0]["merged"]), 1) + for d in result["diff"][0]["merged"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"] in ["Ethernet1/2"]), True) def test_dcnm_intf_eth_merged_new(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_eth_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_eth_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_eth_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_eth_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('eth_merged_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config 
= self.config_data.get("eth_merged_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 5) - for d in result['diff'][0]['merged']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'] in ['Ethernet1/30', 'Ethernet1/31', - 'Ethernet1/32', 'Ethernet1/33', 'Ethernet1/34' ]), True) + self.assertEqual(len(result["diff"][0]["merged"]), 5) + for d in result["diff"][0]["merged"]: + for intf in d["interfaces"]: + self.assertEqual( + ( + intf["ifName"] + in [ + "Ethernet1/30", + "Ethernet1/31", + "Ethernet1/32", + "Ethernet1/33", + "Ethernet1/34", + ] + ), + True, + ) def test_dcnm_intf_eth_merged_idempotent(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_eth_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_eth_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_eth_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_eth_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('eth_merged_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') + self.playbook_config = self.config_data.get("eth_merged_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + 
self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") for cfg in self.playbook_config: cfg["deploy"] = "False" - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) def test_dcnm_intf_eth_replaced_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_eth_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_eth_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_eth_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_eth_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('eth_replaced_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='replaced', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("eth_replaced_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="replaced", fabric="test_fabric", config=self.playbook_config) + ) result = 
self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['replaced']), 4) - - changed_objs = ['BPDUGUARD_ENABLED', 'PORTTYPE_FAST_ENABLED', 'MTU', 'CONF', - 'ALLOWED_VLANS', 'DESC', 'ADMIN_STATE', 'INTF_VRF', 'ACCESS_VLAN', 'SPEED', - 'IP', 'PREFIX', 'ROUTING_TAG', 'SPEED', 'IPv6', 'IPv6_PREFIX'] - - for d in result['diff'][0]['replaced']: - for intf in d['interfaces']: - if_keys = list(intf['nvPairs'].keys()) - self.assertEqual ((set(if_keys).issubset(set(changed_objs))), True) + self.assertEqual(len(result["diff"][0]["replaced"]), 4) + + changed_objs = [ + "BPDUGUARD_ENABLED", + "PORTTYPE_FAST_ENABLED", + "MTU", + "CONF", + "ALLOWED_VLANS", + "DESC", + "ADMIN_STATE", + "INTF_VRF", + "ACCESS_VLAN", + "SPEED", + "IP", + "PREFIX", + "ROUTING_TAG", + "SPEED", + "IPv6", + "IPv6_PREFIX", + ] + + for d in result["diff"][0]["replaced"]: + for intf in d["interfaces"]: + if_keys = list(intf["nvPairs"].keys()) + self.assertEqual((set(if_keys).issubset(set(changed_objs))), True) # Monitor port will not bedeployed - self.assertEqual(len(result['diff'][0]['deploy']), 4) + self.assertEqual(len(result["diff"][0]["deploy"]), 4) def test_dcnm_intf_eth_deleted_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_eth_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_eth_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_eth_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_eth_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('eth_deleted_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = 
self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='deleted', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("eth_deleted_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="deleted", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['deleted']), 0) - self.assertEqual(len(result['diff'][0]['merged']), 0) - self.assertEqual(len(result['diff'][0]['replaced']), 5) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["replaced"]), 5) def test_dcnm_intf_eth_overridden_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_eth_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_eth_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_eth_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_eth_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('eth_overridden_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='overridden', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("eth_overridden_config") + 
self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="overridden", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['deleted']), 7) + self.assertEqual(len(result["diff"][0]["deleted"]), 7) - del_if_names = ['port-channel301', 'port-channel302', 'port-channel303', - 'port-channel300', 'ethernet1/3.2', 'loopback200', 'vpc300'] + del_if_names = [ + "port-channel301", + "port-channel302", + "port-channel303", + "port-channel300", + "ethernet1/3.2", + "loopback200", + "vpc300", + ] - rep_if_names = ['ethernet1/1', 'ethernet1/2', 'ethernet3/2'] - ovr_if_names = ['ethernet1/30'] + rep_if_names = ["ethernet1/1", "ethernet1/2", "ethernet3/2"] + ovr_if_names = ["ethernet1/30"] - for intf in result['diff'][0]['deleted']: - self.assertEqual ((intf['ifName'].lower() in del_if_names), True) + for intf in result["diff"][0]["deleted"]: + self.assertEqual((intf["ifName"].lower() in del_if_names), True) - for d in result['diff'][0]['replaced']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'].lower() in rep_if_names), True) + for d in result["diff"][0]["replaced"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"].lower() in rep_if_names), True) - for d in result['diff'][0]['overridden']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'].lower() in ovr_if_names), True) + for d in result["diff"][0]["overridden"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"].lower() in ovr_if_names), True) -#################################### SUBINT ############################ + # -------------------------- SUBINT -------------------------- def test_dcnm_intf_subint_merged_new(self): # load 
the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_subint_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_subint_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_subint_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_subint_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('subint_merged_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("subint_merged_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 2) - for d in result['diff'][0]['merged']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'] in ['Ethernet1/25.1', 'Ethernet1/25.2']), True) + self.assertEqual(len(result["diff"][0]["merged"]), 2) + for d in result["diff"][0]["merged"]: + for intf in d["interfaces"]: + self.assertEqual( + (intf["ifName"] in ["Ethernet1/25.1", "Ethernet1/25.2"]), True + ) def test_dcnm_intf_subint_merged_idempotent(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_subint_configs') - self.payloads_data = 
loadPlaybookData('dcnm_intf_subint_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_subint_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_subint_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('subint_merged_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') + self.playbook_config = self.config_data.get("subint_merged_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") for cfg in self.playbook_config: cfg["deploy"] = "False" - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) def test_dcnm_intf_subint_replaced_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_subint_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_subint_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_subint_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_subint_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - 
self.playbook_config = self.config_data.get('subint_replaced_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='replaced', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("subint_replaced_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="replaced", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['replaced']), 2) - - changed_objs = ['MTU', 'CONF', 'VLAN', 'DESC', 'ADMIN_STATE', 'SPEED', - 'INTF_VRF', 'IP', 'PREFIX', 'IPv6', 'IPv6_PREFIX'] - - for d in result['diff'][0]['replaced']: - for intf in d['interfaces']: - if_keys = list(intf['nvPairs'].keys()) - self.assertEqual ((set(if_keys).issubset(set(changed_objs))), True) + self.assertEqual(len(result["diff"][0]["replaced"]), 2) + + changed_objs = [ + "MTU", + "CONF", + "VLAN", + "DESC", + "ADMIN_STATE", + "SPEED", + "INTF_VRF", + "IP", + "PREFIX", + "IPv6", + "IPv6_PREFIX", + ] + + for d in result["diff"][0]["replaced"]: + for intf in d["interfaces"]: + if_keys = list(intf["nvPairs"].keys()) + self.assertEqual((set(if_keys).issubset(set(changed_objs))), True) # All 2 will be deployed, even though we have not changed the monitor port - self.assertEqual(len(result['diff'][0]['deploy']), 2) + self.assertEqual(len(result["diff"][0]["deploy"]), 2) def test_dcnm_intf_subint_replaced_non_existing(self): # load the json from playbooks - self.config_data = 
loadPlaybookData('dcnm_intf_subint_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_subint_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_subint_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_subint_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('subint_replaced_config_non_exist') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='replaced', - fabric='test_fabric', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("subint_replaced_config_non_exist") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="replaced", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['replaced']), 1) - self.assertEqual(len(result['diff'][0]['merged']), 0) + self.assertEqual(len(result["diff"][0]["replaced"]), 1) + self.assertEqual(len(result["diff"][0]["merged"]), 0) def test_dcnm_intf_subint_deleted_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_subint_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_subint_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_subint_configs") + self.payloads_data = 
loadPlaybookData("dcnm_intf_subint_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('subint_deleted_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='deleted', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("subint_deleted_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="deleted", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['deleted']), 2) - for intf in result['diff'][0]['deleted']: - self.assertEqual ((intf['ifName'] in ['Ethernet1/25.1', 'Ethernet1/25.2']), True) + self.assertEqual(len(result["diff"][0]["deleted"]), 2) + for intf in result["diff"][0]["deleted"]: + self.assertEqual( + (intf["ifName"] in ["Ethernet1/25.1", "Ethernet1/25.2"]), True + ) def test_dcnm_intf_subint_deleted_non_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_subint_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_subint_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_subint_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_subint_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - 
self.playbook_config = self.config_data.get('subint_deleted_non_existing_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='deleted', - fabric='test_fabric', - config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "subint_deleted_non_existing_config" + ) + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="deleted", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result['diff'][0]['deleted']), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) def test_dcnm_intf_subint_overridden_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_subint_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_subint_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_subint_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_subint_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('subint_overridden_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='overridden', - 
fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("subint_overridden_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="overridden", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['deleted']), 7) + self.assertEqual(len(result["diff"][0]["deleted"]), 7) - del_if_names = ['port-channel301', 'port-channel302', 'port-channel303', - 'port-channel300', 'ethernet1/3.2', 'loopback200', 'vpc300'] + del_if_names = [ + "port-channel301", + "port-channel302", + "port-channel303", + "port-channel300", + "ethernet1/3.2", + "loopback200", + "vpc300", + ] - rep_if_names = ['ethernet1/1', 'ethernet1/2', 'ethernet3/2'] - ovr_if_names = ['Ethernet1/25.1'] + rep_if_names = ["ethernet1/1", "ethernet1/2", "ethernet3/2"] + ovr_if_names = ["Ethernet1/25.1"] - for intf in result['diff'][0]['deleted']: - self.assertEqual ((intf['ifName'].lower() in del_if_names), True) + for intf in result["diff"][0]["deleted"]: + self.assertEqual((intf["ifName"].lower() in del_if_names), True) - for d in result['diff'][0]['replaced']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'].lower() in rep_if_names), True) + for d in result["diff"][0]["replaced"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"].lower() in rep_if_names), True) - for d in result['diff'][0]['overridden']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'].lower() in ovr_if_names), True) + for d in result["diff"][0]["overridden"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"].lower() in ovr_if_names), True) -#################################### LOOPBACK 
############################ + # -------------------------- LOOPBACK -------------------------- def test_dcnm_intf_lo_merged_new(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_lo_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_lo_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_lo_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_lo_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('lo_merged_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("lo_merged_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 2) - for d in result['diff'][0]['merged']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'] in ['Loopback100', 'Loopback101']), True) + self.assertEqual(len(result["diff"][0]["merged"]), 2) + for d in result["diff"][0]["merged"]: + for intf in d["interfaces"]: + self.assertEqual( + (intf["ifName"] in ["Loopback100", "Loopback101"]), True + ) def test_dcnm_intf_lo_merged_existing(self): # load the json from 
playbooks - self.config_data = loadPlaybookData('dcnm_intf_lo_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_lo_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_lo_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_lo_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('lo_merged_existing_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='merged', - fabric='test_fabric', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("lo_merged_existing_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 1) - for d in result['diff'][0]['merged']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'] in ['Loopback100']), True) + self.assertEqual(len(result["diff"][0]["merged"]), 1) + for d in result["diff"][0]["merged"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"] in ["Loopback100"]), True) def test_dcnm_intf_lo_merged_idempotent(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_lo_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_lo_payloads') - self.have_all_payloads_data = 
loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_lo_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_lo_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('lo_merged_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') + self.playbook_config = self.config_data.get("lo_merged_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") for cfg in self.playbook_config: cfg["deploy"] = "False" - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) def test_dcnm_intf_lo_replaced_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_lo_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_lo_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_lo_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_lo_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('lo_replaced_config') - self.playbook_mock_succ_resp = 
self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='replaced', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("lo_replaced_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="replaced", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - - self.assertEqual(len(result['diff'][0]['replaced']), 2) - - changed_objs = ['CONF', 'DESC', 'ADMIN_STATE', 'ROUTE_MAP_TAG', - 'SPEED', 'INTF_VRF', 'IP', 'V6IP'] - - for d in result['diff'][0]['replaced']: - for intf in d['interfaces']: - if_keys = list(intf['nvPairs'].keys()) - self.assertEqual ((set(if_keys).issubset(set(changed_objs))), True) + self.assertEqual(len(result["diff"][0]["replaced"]), 2) + + changed_objs = [ + "CONF", + "DESC", + "ADMIN_STATE", + "ROUTE_MAP_TAG", + "SPEED", + "INTF_VRF", + "IP", + "V6IP", + ] + + for d in result["diff"][0]["replaced"]: + for intf in d["interfaces"]: + if_keys = list(intf["nvPairs"].keys()) + self.assertEqual((set(if_keys).issubset(set(changed_objs))), True) # All 2 will be deployed, even though we have not changed the monitor port - self.assertEqual(len(result['diff'][0]['deploy']), 2) + self.assertEqual(len(result["diff"][0]["deploy"]), 2) def test_dcnm_intf_lo_deleted_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_lo_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_lo_payloads') - self.have_all_payloads_data = 
loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_lo_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_lo_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('lo_deleted_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='deleted', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("lo_deleted_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="deleted", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['deleted']), 2) - for intf in result['diff'][0]['deleted']: - self.assertEqual ((intf['ifName'] in ['Loopback100', 'Loopback101']), True) + self.assertEqual(len(result["diff"][0]["deleted"]), 2) + for intf in result["diff"][0]["deleted"]: + self.assertEqual((intf["ifName"] in ["Loopback100", "Loopback101"]), True) def test_dcnm_intf_lo_overridden_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_lo_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_lo_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_lo_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_lo_payloads") + self.have_all_payloads_data = 
loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('lo_overridden_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='overridden', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("lo_overridden_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="overridden", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['deleted']), 7) + self.assertEqual(len(result["diff"][0]["deleted"]), 7) - del_if_names = ['port-channel301', 'port-channel302', 'port-channel303', - 'port-channel300', 'ethernet1/3.2', 'loopback200', 'vpc300'] + del_if_names = [ + "port-channel301", + "port-channel302", + "port-channel303", + "port-channel300", + "ethernet1/3.2", + "loopback200", + "vpc300", + ] - rep_if_names = ['ethernet1/1', 'ethernet1/2', 'ethernet3/2'] - ovr_if_names = ['loopback100', 'loopback101'] + rep_if_names = ["ethernet1/1", "ethernet1/2", "ethernet3/2"] + ovr_if_names = ["loopback100", "loopback101"] - for intf in result['diff'][0]['deleted']: - self.assertEqual ((intf['ifName'].lower() in del_if_names), True) + for intf in result["diff"][0]["deleted"]: + self.assertEqual((intf["ifName"].lower() in del_if_names), True) - for d in result['diff'][0]['replaced']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'].lower() in rep_if_names), True) + for d 
in result["diff"][0]["replaced"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"].lower() in rep_if_names), True) - for d in result['diff'][0]['overridden']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'].lower() in ovr_if_names), True) + for d in result["diff"][0]["overridden"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"].lower() in ovr_if_names), True) def test_dcnm_intf_lo_overridden_existing_2(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_lo_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_lo_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_lo_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_lo_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('lo_overridden_existing_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='overridden', - fabric='test_fabric', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("lo_overridden_existing_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="overridden", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['deleted']), 6) + self.assertEqual(len(result["diff"][0]["deleted"]), 6) - 
del_if_names = ['port-channel301', 'port-channel302', 'port-channel303', - 'port-channel300', 'ethernet1/3.2', 'vpc300'] + del_if_names = [ + "port-channel301", + "port-channel302", + "port-channel303", + "port-channel300", + "ethernet1/3.2", + "vpc300", + ] - rep_if_names = ['ethernet1/1', 'ethernet1/2', 'ethernet3/2'] - ovr_if_names = ['loopback200'] + rep_if_names = ["ethernet1/1", "ethernet1/2", "ethernet3/2"] + ovr_if_names = ["loopback200"] - for intf in result['diff'][0]['deleted']: - self.assertEqual ((intf['ifName'].lower() in del_if_names), True) + for intf in result["diff"][0]["deleted"]: + self.assertEqual((intf["ifName"].lower() in del_if_names), True) - for d in result['diff'][0]['replaced']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'].lower() in rep_if_names), True) + for d in result["diff"][0]["replaced"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"].lower() in rep_if_names), True) - for d in result['diff'][0]['overridden']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'].lower() in ovr_if_names), True) + for d in result["diff"][0]["overridden"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"].lower() in ovr_if_names), True) def test_dcnm_intf_lo_overridden_non_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_lo_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_lo_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_lo_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_lo_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('lo_overridden_non_existing_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = 
self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='overridden', - fabric='test_fabric', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("lo_overridden_non_existing_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="overridden", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['deleted']), 7) + self.assertEqual(len(result["diff"][0]["deleted"]), 7) - del_if_names = ['port-channel301', 'port-channel302', 'port-channel303', - 'port-channel300', 'ethernet1/3.2', 'loopback200', 'vpc300'] + del_if_names = [ + "port-channel301", + "port-channel302", + "port-channel303", + "port-channel300", + "ethernet1/3.2", + "loopback200", + "vpc300", + ] - rep_if_names = ['ethernet1/1', 'ethernet1/2', 'ethernet3/2'] - ovr_if_names = ['loopback900'] + rep_if_names = ["ethernet1/1", "ethernet1/2", "ethernet3/2"] + ovr_if_names = ["loopback900"] - for intf in result['diff'][0]['deleted']: - self.assertEqual ((intf['ifName'].lower() in del_if_names), True) + for intf in result["diff"][0]["deleted"]: + self.assertEqual((intf["ifName"].lower() in del_if_names), True) - for d in result['diff'][0]['replaced']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'].lower() in rep_if_names), True) + for d in result["diff"][0]["replaced"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"].lower() in rep_if_names), True) - for d in result['diff'][0]['overridden']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'].lower() in ovr_if_names), True) + for d in 
result["diff"][0]["overridden"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"].lower() in ovr_if_names), True) -#################################### vPC ############################ + # -------------------------- vPC -------------------------- def test_dcnm_intf_vpc_merged_new(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_vpc_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_vpc_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_vpc_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_vpc_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('vpc_merged_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("vpc_merged_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 2) - for d in result['diff'][0]['merged']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'] in 
['vPC750', 'vPC751']), True) + self.assertEqual(len(result["diff"][0]["merged"]), 2) + for d in result["diff"][0]["merged"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"] in ["vPC750", "vPC751"]), True) def test_dcnm_intf_vpc_merged_idempotent(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_vpc_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_vpc_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_vpc_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_vpc_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('vpc_merged_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') + self.playbook_config = self.config_data.get("vpc_merged_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") for cfg in self.playbook_config: cfg["deploy"] = "False" - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") - 
set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) def test_dcnm_intf_vpc_deleted_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_vpc_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_vpc_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_vpc_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_vpc_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('vpc_deleted_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='deleted', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("vpc_deleted_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="deleted", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['deleted']), 2) - for 
intf in result['diff'][0]['deleted']: - self.assertEqual ((intf['ifName'] in ['vPC750', 'vPC751']), True) + self.assertEqual(len(result["diff"][0]["deleted"]), 2) + for intf in result["diff"][0]["deleted"]: + self.assertEqual((intf["ifName"] in ["vPC750", "vPC751"]), True) def test_dcnm_intf_vpc_replaced_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_vpc_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_vpc_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_vpc_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_vpc_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('vpc_replaced_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='replaced', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("vpc_replaced_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="replaced", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['replaced']), 2) - - changed_objs = ['PEER1_MEMBER_INTERFACES', 'PEER2_MEMBER_INTERFACES', - 'PC_MODE', 
'BPDUGUARD_ENABLED', 'SPEED', - 'PORTTYPE_FAST_ENABLED', 'MTU', 'PEER1_ALLOWED_VLANS', - 'PEER2_ALLOWED_VLANS', 'PEER1_PO_DESC','PEER2_PO_DESC', 'ADMIN_STATE', - 'PEER1_ACCESS_VLAN', 'PEER2_ACCESS_VLAN', - 'PEER1_CONF', 'PEER2_CONF', 'INTF_NAME'] - - for d in result['diff'][0]['replaced']: - for intf in d['interfaces']: - if_keys = list(intf['nvPairs'].keys()) - self.assertEqual ((set(if_keys).issubset(set(changed_objs))), True) + self.assertEqual(len(result["diff"][0]["replaced"]), 2) + + changed_objs = [ + "PEER1_MEMBER_INTERFACES", + "PEER2_MEMBER_INTERFACES", + "PC_MODE", + "BPDUGUARD_ENABLED", + "SPEED", + "PORTTYPE_FAST_ENABLED", + "MTU", + "PEER1_ALLOWED_VLANS", + "PEER2_ALLOWED_VLANS", + "PEER1_PO_DESC", + "PEER2_PO_DESC", + "ADMIN_STATE", + "PEER1_ACCESS_VLAN", + "PEER2_ACCESS_VLAN", + "PEER1_CONF", + "PEER2_CONF", + "INTF_NAME", + ] + + for d in result["diff"][0]["replaced"]: + for intf in d["interfaces"]: + if_keys = list(intf["nvPairs"].keys()) + self.assertEqual((set(if_keys).issubset(set(changed_objs))), True) # All 4 will be deployed, even though we have not changed the monitor port - self.assertEqual(len(result['diff'][0]['deploy']), 2) + self.assertEqual(len(result["diff"][0]["deploy"]), 2) def test_dcnm_intf_vpc_overridden_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_vpc_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_vpc_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_vpc_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_vpc_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('vpc_overridden_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - self.mock_ip_sn = 
self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='overridden', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("vpc_overridden_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="overridden", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['deleted']), 7) + self.assertEqual(len(result["diff"][0]["deleted"]), 7) - del_if_names = ['port-channel301', 'port-channel302', 'port-channel303', - 'port-channel300', 'ethernet1/3.2', 'loopback200', 'vpc300'] + del_if_names = [ + "port-channel301", + "port-channel302", + "port-channel303", + "port-channel300", + "ethernet1/3.2", + "loopback200", + "vpc300", + ] - rep_if_names = ['ethernet1/3.2', 'ethernet1/1', 'ethernet1/2', 'ethernet3/2'] - ovr_if_names = ['vPC750'] + rep_if_names = ["ethernet1/3.2", "ethernet1/1", "ethernet1/2", "ethernet3/2"] + ovr_if_names = ["vPC750"] - for intf in result['diff'][0]['deleted']: - self.assertEqual ((intf['ifName'].lower() in del_if_names), True) + for intf in result["diff"][0]["deleted"]: + self.assertEqual((intf["ifName"].lower() in del_if_names), True) - for d in result['diff'][0]['replaced']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'].lower() in rep_if_names), True) + for d in result["diff"][0]["replaced"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"].lower() in rep_if_names), True) - for d in 
result['diff'][0]['overridden']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'].lower() in ovr_if_names), True) + for d in result["diff"][0]["overridden"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"].lower() in ovr_if_names), True) -#################################### GENERAL ############################ + # -------------------------- GENERAL -------------------------- def test_dcnm_intf_gen_missing_ip_sn(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_pc_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_pc_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_pc_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_pc_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('pc_merged_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = [] - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("pc_merged_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = [] + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual (result['msg'], 'Fabric test_fabric missing on DCNM or does not have any switches') - self.assertEqual (result['failed'], True) + self.assertEqual( + result["msg"], + "Fabric test_fabric missing on DCNM or does 
not have any switches", + ) + self.assertEqual(result["failed"], True) def test_dcnm_intf_mixed_intf_merged_new(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_mixed_configs') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_mixed_configs") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('mixed_merged_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("mixed_merged_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual (result['changed'], True) + self.assertEqual(result["changed"], True) - self.assertEqual(len(result['diff'][0]['merged']), 5) - self.assertEqual(len(result['diff'][0]['deleted']), 0) - self.assertEqual(len(result['diff'][0]['replaced']), 0) - self.assertEqual(len(result['diff'][0]['deploy']), 5) + self.assertEqual(len(result["diff"][0]["merged"]), 5) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["replaced"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 5) def test_dcnm_intf_bunched_intf_merged_new(self): # load the json from 
playbooks - self.config_data = loadPlaybookData('dcnm_intf_bunched_configs') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_bunched_configs") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('bunched_merged_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("bunched_merged_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual (result['changed'], True) + self.assertEqual(result["changed"], True) - self.assertEqual(len(result['diff'][0]['merged']), 10) - self.assertEqual(len(result['diff'][0]['deleted']), 0) - self.assertEqual(len(result['diff'][0]['replaced']), 0) - self.assertEqual(len(result['diff'][0]['deploy']), 10) + self.assertEqual(len(result["diff"][0]["merged"]), 10) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["replaced"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 10) - if_names = ['port-channel300', 'port-channel400', 'port-channel301', - 'port-channel401', 'ethernet1/14', 'ethernet1/32', 'ethernet1/22', - 'ethernet1/13', 'vpc850', 'vpc750'] + if_names = [ + 
"port-channel300", + "port-channel400", + "port-channel301", + "port-channel401", + "ethernet1/14", + "ethernet1/32", + "ethernet1/22", + "ethernet1/13", + "vpc850", + "vpc750", + ] - for d in result['diff'][0]['merged']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'].lower() in if_names), True) + for d in result["diff"][0]["merged"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"].lower() in if_names), True) - def test_dcnm_intf_type_missing_merged_new (self): + def test_dcnm_intf_type_missing_merged_new(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_pc_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_pc_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_pc_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_pc_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('pc_type_missing_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("pc_type_missing_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual (result['msg'], ' element, which 
is mandatory is missing in config') - self.assertEqual (result['failed'], True) - + self.assertEqual( + result["msg"], " element, which is mandatory is missing in config" + ) + self.assertEqual(result["failed"], True) - def test_dcnm_intf_missing_state (self): + def test_dcnm_intf_missing_state(self): - self.config_data = loadPlaybookData('dcnm_intf_pc_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_pc_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_pc_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_pc_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('pc_state_missing_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') + self.playbook_config = self.config_data.get("pc_state_missing_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") - set_module_args(dict(fabric='test_fabric', config=self.playbook_config)) + set_module_args(dict(fabric="test_fabric", config=self.playbook_config)) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 1) - self.assertEqual(len(result['diff'][0]['deleted']), 0) - self.assertEqual(len(result['diff'][0]['replaced']), 0) - self.assertEqual(len(result['diff'][0]['deploy']), 1) + self.assertEqual(len(result["diff"][0]["merged"]), 1) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + 
self.assertEqual(len(result["diff"][0]["replaced"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 1) - if_names = ['port-channel300'] + if_names = ["port-channel300"] - for d in result['diff'][0]['merged']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'].lower() in if_names), True) + for d in result["diff"][0]["merged"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"].lower() in if_names), True) - def test_dcnm_intf_missing_peer_members (self): + def test_dcnm_intf_missing_peer_members(self): - self.config_data = loadPlaybookData('dcnm_intf_vpc_configs') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_vpc_configs") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('vpc_members_missing_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - - set_module_args(dict(fabric='test_fabric', state='merged', config=self.playbook_config)) + self.playbook_config = self.config_data.get("vpc_members_missing_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict(fabric="test_fabric", state="merged", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 1) - self.assertEqual(len(result['diff'][0]['deleted']), 0) - self.assertEqual(len(result['diff'][0]['replaced']), 0) - 
self.assertEqual(len(result['diff'][0]['deploy']), 1) + self.assertEqual(len(result["diff"][0]["merged"]), 1) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["replaced"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 1) - if_names = ['vpc751'] + if_names = ["vpc751"] - for d in result['diff'][0]['merged']: - for intf in d['interfaces']: - self.assertEqual ((intf['ifName'].lower() in if_names), True) + for d in result["diff"][0]["merged"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"].lower() in if_names), True) def test_dcnm_intf_query(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_intf_query_configs') - self.payloads_data = loadPlaybookData('dcnm_intf_query_payloads') - self.have_all_payloads_data = loadPlaybookData('dcnm_intf_have_all_payloads') + self.config_data = loadPlaybookData("dcnm_intf_query_configs") + self.payloads_data = loadPlaybookData("dcnm_intf_query_payloads") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") # load required config data - self.playbook_config = self.config_data.get('query_config') - self.playbook_mock_succ_resp = self.config_data.get('mock_succ_resp') - self.mock_ip_sn = self.config_data.get('mock_ip_sn') - self.mock_fab_inv = self.config_data.get('mock_fab_inv_data') - self.playbook_mock_vpc_resp = self.config_data.get('mock_vpc_resp') - - set_module_args(dict(state='query', - fabric='test_fabric', config=self.playbook_config)) + self.playbook_config = self.config_data.get("query_config") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + + set_module_args( + dict(state="query", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, 
failed=False) + self.assertEqual(result["changed"], False) - self.assertEqual (result['changed'], False) - - self.assertEqual(len(result['diff'][0]['merged']), 0) - self.assertEqual(len(result['diff'][0]['deleted']), 0) - self.assertEqual(len(result['diff'][0]['replaced']), 0) - self.assertEqual(len(result['diff'][0]['deploy']), 0) - self.assertEqual(len(result['diff'][0]['query']), 6) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["replaced"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 6) diff --git a/tests/unit/modules/dcnm/test_dcnm_inventory.py b/tests/unit/modules/dcnm/test_dcnm_inventory.py index 2a3f5986d..3052f0267 100644 --- a/tests/unit/modules/dcnm/test_dcnm_inventory.py +++ b/tests/unit/modules/dcnm/test_dcnm_inventory.py @@ -1,6 +1,4 @@ -#!/usr/bin/python -# -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,7 +13,8 @@ # limitations under the License. 
# Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) +from __future__ import absolute_import, division, print_function + __metaclass__ = type from ansible_collections.ansible.netcommon.tests.unit.compat.mock import patch @@ -36,94 +35,152 @@ class TestDcnmInvModule(TestDcnmModule): fd = open("dcnm-ut", "w") - test_data = loadPlaybookData('dcnm_inventory') + test_data = loadPlaybookData("dcnm_inventory") SUCCESS_RETURN_CODE = 200 - playbook_merge_switch_config = test_data.get('playbook_merge_switch_config') - playbook_merge_role_switch_config = test_data.get('playbook_merge_role_switch_config') - playbook_merge_bf_switch_config = test_data.get('playbook_merge_bf_switch_config') - playbook_merge_multiple_switch_config = test_data.get('playbook_merge_multiple_switch_config') - playbook_merge_bf_multiple_switch_config = test_data.get('playbook_merge_bf_multiple_switch_config') - playbook_merge_bf_gf_multiple_switch_config = test_data.get('playbook_merge_bf_multiple_switch_config') - playbook_delete_switch_config = test_data.get('playbook_delete_switch_config') - playbook_delete_multiple_switch_config = test_data.get('playbook_delete_multiple_switch_config') - playbook_delete_all_switch_config = test_data.get('playbook_delete_all_switch_config') - playbook_override_switch_config = test_data.get('playbook_override_switch_config') - playbook_invalid_param_config = test_data.get('playbook_invalid_param_config') - playbook_invalid_discover_payload_config = test_data.get('playbook_invalid_discover_payload_config') - playbook_query_switch_config = test_data.get('playbook_query_switch_config') + playbook_merge_switch_config = test_data.get("playbook_merge_switch_config") + playbook_merge_role_switch_config = test_data.get( + "playbook_merge_role_switch_config" + ) + playbook_merge_bf_switch_config = test_data.get("playbook_merge_bf_switch_config") + playbook_merge_multiple_switch_config = test_data.get( + 
"playbook_merge_multiple_switch_config" + ) + playbook_merge_bf_multiple_switch_config = test_data.get( + "playbook_merge_bf_multiple_switch_config" + ) + playbook_merge_bf_gf_multiple_switch_config = test_data.get( + "playbook_merge_bf_multiple_switch_config" + ) + playbook_delete_switch_config = test_data.get("playbook_delete_switch_config") + playbook_delete_multiple_switch_config = test_data.get( + "playbook_delete_multiple_switch_config" + ) + playbook_delete_all_switch_config = test_data.get( + "playbook_delete_all_switch_config" + ) + playbook_override_switch_config = test_data.get("playbook_override_switch_config") + playbook_invalid_param_config = test_data.get("playbook_invalid_param_config") + playbook_invalid_discover_payload_config = test_data.get( + "playbook_invalid_discover_payload_config" + ) + playbook_query_switch_config = test_data.get("playbook_query_switch_config") # initial merge switch success - get_have_initial_success = test_data.get('get_have_initial_success') - get_have_two_switch_success = test_data.get('get_have_two_switch_success') - get_have_override_switch_success = test_data.get('get_have_override_switch_success') - get_have_null_config_switch_success = test_data.get('get_have_null_config_switch_success') - get_have_migration_switch_success = test_data.get('get_have_migration_switch_success') - get_have_already_created_switch_success = test_data.get('get_have_already_created_switch_success') - import_switch_discover_success = test_data.get('import_switch_discover_success') - get_inventory_initial_switch_success = test_data.get('get_inventory_initial_switch_success') - get_inventory_query_switch_success = test_data.get('get_inventory_query_switch_success') - get_inventory_query_no_switch_success = test_data.get('get_inventory_query_no_switch_success') - get_inventory_multiple_switch_success = test_data.get('get_inventory_multiple_switch_success') - get_inventory_multiple_bf_switch_success = 
test_data.get('get_inventory_multiple_bf_switch_success') - get_inventory_multiple_bf_gf_switch_success = test_data.get('get_inventory_multiple_bf_gf_switch_success') - get_inventory_override_switch_success = test_data.get('get_inventory_override_switch_success') - get_inventory_blank_success = test_data.get('get_inventory_blank_success') - rediscover_switch_success = test_data.get('rediscover_switch_success') - rediscover_switch107_success = test_data.get('rediscover_switch107_success') - get_lan_switch_cred_success = test_data.get('get_lan_switch_cred_success') - get_lan_multiple_switch_cred_success = test_data.get('get_lan_multiple_switch_cred_success') - get_lan_multiple_new_switch_cred_success = test_data.get('get_lan_multiple_new_switch_cred_success') - get_lan_multiple_new_bf_switch_cred_success = test_data.get('get_lan_multiple_new_bf_switch_cred_success') - get_lan_switch_override_cred_success = test_data.get('get_lan_switch_override_cred_success') - set_lan_switch_cred_success = test_data.get('set_lan_switch_cred_success') - set_assign_role_success = test_data.get('set_assign_role_success') - set_assign_bg_role_success = test_data.get('set_assign_bg_role_success') - get_fabric_id_success = test_data.get('get_fabric_id_success') - config_save_switch_success = test_data.get('config_save_switch_success') - config_deploy_switch_success = test_data.get('config_deploy_switch_success') + get_have_initial_success = test_data.get("get_have_initial_success") + get_have_two_switch_success = test_data.get("get_have_two_switch_success") + get_have_override_switch_success = test_data.get("get_have_override_switch_success") + get_have_null_config_switch_success = test_data.get( + "get_have_null_config_switch_success" + ) + get_have_migration_switch_success = test_data.get( + "get_have_migration_switch_success" + ) + get_have_already_created_switch_success = test_data.get( + "get_have_already_created_switch_success" + ) + import_switch_discover_success = 
test_data.get("import_switch_discover_success") + get_inventory_initial_switch_success = test_data.get( + "get_inventory_initial_switch_success" + ) + get_inventory_query_switch_success = test_data.get( + "get_inventory_query_switch_success" + ) + get_inventory_query_no_switch_success = test_data.get( + "get_inventory_query_no_switch_success" + ) + get_inventory_multiple_switch_success = test_data.get( + "get_inventory_multiple_switch_success" + ) + get_inventory_multiple_bf_switch_success = test_data.get( + "get_inventory_multiple_bf_switch_success" + ) + get_inventory_multiple_bf_gf_switch_success = test_data.get( + "get_inventory_multiple_bf_gf_switch_success" + ) + get_inventory_override_switch_success = test_data.get( + "get_inventory_override_switch_success" + ) + get_inventory_blank_success = test_data.get("get_inventory_blank_success") + rediscover_switch_success = test_data.get("rediscover_switch_success") + rediscover_switch107_success = test_data.get("rediscover_switch107_success") + get_lan_switch_cred_success = test_data.get("get_lan_switch_cred_success") + get_lan_multiple_switch_cred_success = test_data.get( + "get_lan_multiple_switch_cred_success" + ) + get_lan_multiple_new_switch_cred_success = test_data.get( + "get_lan_multiple_new_switch_cred_success" + ) + get_lan_multiple_new_bf_switch_cred_success = test_data.get( + "get_lan_multiple_new_bf_switch_cred_success" + ) + get_lan_switch_override_cred_success = test_data.get( + "get_lan_switch_override_cred_success" + ) + set_lan_switch_cred_success = test_data.get("set_lan_switch_cred_success") + set_assign_role_success = test_data.get("set_assign_role_success") + set_assign_bg_role_success = test_data.get("set_assign_bg_role_success") + get_fabric_id_success = test_data.get("get_fabric_id_success") + config_save_switch_success = test_data.get("config_save_switch_success") + config_deploy_switch_success = test_data.get("config_deploy_switch_success") # initial delete switch success - 
get_have_one_switch_success = test_data.get('get_have_one_switch_success') - delete_switch_success = test_data.get('delete_switch_success') - get_have_multiple_switch_success = test_data.get('get_have_multiple_switch_success') - delete_switch109_success = test_data.get('delete_switch109_success') - delete_switch107_success = test_data.get('delete_switch107_success') + get_have_one_switch_success = test_data.get("get_have_one_switch_success") + delete_switch_success = test_data.get("delete_switch_success") + get_have_multiple_switch_success = test_data.get("get_have_multiple_switch_success") + delete_switch109_success = test_data.get("delete_switch109_success") + delete_switch107_success = test_data.get("delete_switch107_success") # negative cases - get_have_initial_failure = test_data.get('get_have_initial_failure') - get_have_failure = test_data.get('get_have_failure') - import_switch_discover_failure = test_data.get('import_switch_discover_failure') - get_inventory_initial_switch_failure = test_data.get('get_inventory_initial_switch_failure') - rediscover_switch_failure = test_data.get('rediscover_switch_failure') - get_lan_switch_cred_failure = test_data.get('get_lan_switch_cred_failure') - set_lan_switch_cred_failure = test_data.get('set_lan_switch_cred_failure') - set_assign_role_failure = test_data.get('set_assign_role_failure') - get_fabric_id_failure = test_data.get('get_fabric_id_failure') - config_save_switch_failure = test_data.get('config_save_switch_failure') - config_deploy_switch_failure = test_data.get('config_deploy_switch_failure') - invalid_remove_switch = test_data.get('invalid_remove_switch') + get_have_initial_failure = test_data.get("get_have_initial_failure") + get_have_failure = test_data.get("get_have_failure") + import_switch_discover_failure = test_data.get("import_switch_discover_failure") + get_inventory_initial_switch_failure = test_data.get( + "get_inventory_initial_switch_failure" + ) + rediscover_switch_failure = 
test_data.get("rediscover_switch_failure") + get_lan_switch_cred_failure = test_data.get("get_lan_switch_cred_failure") + set_lan_switch_cred_failure = test_data.get("set_lan_switch_cred_failure") + set_assign_role_failure = test_data.get("set_assign_role_failure") + get_fabric_id_failure = test_data.get("get_fabric_id_failure") + config_save_switch_failure = test_data.get("config_save_switch_failure") + config_deploy_switch_failure = test_data.get("config_deploy_switch_failure") + invalid_remove_switch = test_data.get("invalid_remove_switch") def init_data(self): # Some of the mock data is re-initialized after each test as previous test might have altered portions # of the mock data. - self.mock_inv_discover_params = copy.deepcopy(self.test_data.get('mock_inv_discover_params')) - self.mock_inv_discover109_params = copy.deepcopy(self.test_data.get('mock_inv_discover109_params')) - self.mock_inv_discover107_params = copy.deepcopy(self.test_data.get('mock_inv_discover107_params')) - self.mock_inv_blank_discover_params = copy.deepcopy(self.test_data.get('mock_inv_blank_discover_params')) + self.mock_inv_discover_params = copy.deepcopy( + self.test_data.get("mock_inv_discover_params") + ) + self.mock_inv_discover109_params = copy.deepcopy( + self.test_data.get("mock_inv_discover109_params") + ) + self.mock_inv_discover107_params = copy.deepcopy( + self.test_data.get("mock_inv_discover107_params") + ) + self.mock_inv_blank_discover_params = copy.deepcopy( + self.test_data.get("mock_inv_blank_discover_params") + ) pass def setUp(self): super(TestDcnmInvModule, self).setUp() - self.mock_dcnm_send = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_inventory.dcnm_send') + self.mock_dcnm_send = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_inventory.dcnm_send" + ) self.run_dcnm_send = self.mock_dcnm_send.start() - self.mock_dcnm_version_supported = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_inventory.dcnm_version_supported') + 
self.mock_dcnm_version_supported = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_inventory.dcnm_version_supported" + ) self.run_dcnm_version_supported = self.mock_dcnm_version_supported.start() - self.mock_dcnm_fabric_details = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_inventory.get_fabric_details') + self.mock_dcnm_fabric_details = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_inventory.get_fabric_details" + ) self.run_dcnm_fabric_details = self.mock_dcnm_fabric_details.start() def tearDown(self): @@ -132,329 +189,394 @@ def tearDown(self): self.mock_dcnm_version_supported.stop() self.mock_dcnm_fabric_details.stop() - def load_fixtures(self, response=None, device=''): + def load_fixtures(self, response=None, device=""): self.run_dcnm_version_supported.return_value = 11 self.run_dcnm_fabric_details.return_value = { - 'nvPairs': { - 'GRFIELD_DEBUG_FLAG': "Enable" - } + "nvPairs": {"GRFIELD_DEBUG_FLAG": "Enable"} } - if 'get_have_failure' in self._testMethodName: - self.run_dcnm_send.side_effect = [self.get_have_initial_failure, self.get_have_failure] + if "get_have_failure" in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.get_have_initial_failure, + self.get_have_failure, + ] - elif 'merge_switch' in self._testMethodName: + elif "merge_switch" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, - self.get_have_initial_success, - self.import_switch_discover_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.rediscover_switch_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.get_lan_switch_cred_success, - self.set_lan_switch_cred_success, - self.get_inventory_initial_switch_success, - self.set_assign_role_success, - self.get_fabric_id_success, - self.config_save_switch_success, - 
self.config_deploy_switch_success] - - elif 'merge_role_switch' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover_params, + self.get_have_initial_success, + self.import_switch_discover_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.rediscover_switch_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.get_lan_switch_cred_success, + self.set_lan_switch_cred_success, + self.get_inventory_initial_switch_success, + self.set_assign_role_success, + self.get_fabric_id_success, + self.config_save_switch_success, + self.config_deploy_switch_success, + ] + + elif "merge_role_switch" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, - self.get_have_initial_success, - self.import_switch_discover_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.rediscover_switch_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.get_lan_switch_cred_success, - self.set_lan_switch_cred_success, - self.get_inventory_initial_switch_success, - self.set_assign_bg_role_success, - self.get_fabric_id_success, - self.config_save_switch_success, - self.config_deploy_switch_success] - - elif 'merge_brownfield_switch' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover_params, + self.get_have_initial_success, + self.import_switch_discover_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.rediscover_switch_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.get_lan_switch_cred_success, + 
self.set_lan_switch_cred_success, + self.get_inventory_initial_switch_success, + self.set_assign_bg_role_success, + self.get_fabric_id_success, + self.config_save_switch_success, + self.config_deploy_switch_success, + ] + + elif "merge_brownfield_switch" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, - self.get_have_initial_success, - self.import_switch_discover_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.rediscover_switch_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.get_lan_switch_cred_success, - self.set_lan_switch_cred_success, - self.get_inventory_initial_switch_success, - self.set_assign_role_success, - self.get_fabric_id_success, - self.config_save_switch_success, - self.config_deploy_switch_success] - - elif 'merge_multiple_switch' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover_params, + self.get_have_initial_success, + self.import_switch_discover_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.rediscover_switch_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.get_lan_switch_cred_success, + self.set_lan_switch_cred_success, + self.get_inventory_initial_switch_success, + self.set_assign_role_success, + self.get_fabric_id_success, + self.config_save_switch_success, + self.config_deploy_switch_success, + ] + + elif "merge_multiple_switch" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover109_params, - self.mock_inv_discover_params, - self.get_have_initial_success, - self.import_switch_discover_success, - self.import_switch_discover_success, - self.get_inventory_multiple_switch_success, - self.get_inventory_multiple_switch_success, - self.get_inventory_multiple_switch_success, - 
self.rediscover_switch_success, - self.rediscover_switch_success, - self.get_inventory_multiple_switch_success, - self.get_inventory_multiple_switch_success, - self.get_lan_multiple_new_switch_cred_success, - self.set_lan_switch_cred_success, - self.set_lan_switch_cred_success, - self.get_inventory_multiple_switch_success, - self.set_assign_role_success, - self.set_assign_role_success, - self.get_fabric_id_success, - self.config_save_switch_success, - self.config_deploy_switch_success] - - elif 'merge_multiple_brownfield_switch' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover109_params, + self.mock_inv_discover_params, + self.get_have_initial_success, + self.import_switch_discover_success, + self.import_switch_discover_success, + self.get_inventory_multiple_switch_success, + self.get_inventory_multiple_switch_success, + self.get_inventory_multiple_switch_success, + self.rediscover_switch_success, + self.rediscover_switch_success, + self.get_inventory_multiple_switch_success, + self.get_inventory_multiple_switch_success, + self.get_lan_multiple_new_switch_cred_success, + self.set_lan_switch_cred_success, + self.set_lan_switch_cred_success, + self.get_inventory_multiple_switch_success, + self.set_assign_role_success, + self.set_assign_role_success, + self.get_fabric_id_success, + self.config_save_switch_success, + self.config_deploy_switch_success, + ] + + elif "merge_multiple_brownfield_switch" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, - self.mock_inv_discover107_params, - self.get_have_initial_success, - self.import_switch_discover_success, - self.import_switch_discover_success, - self.get_inventory_multiple_bf_switch_success, - self.get_inventory_multiple_bf_switch_success, - self.rediscover_switch_success, - self.rediscover_switch_success, - self.get_inventory_multiple_bf_switch_success, - self.get_inventory_multiple_bf_switch_success, - 
self.get_lan_multiple_new_bf_switch_cred_success, - self.set_lan_switch_cred_success, - self.set_lan_switch_cred_success, - self.get_inventory_multiple_bf_switch_success, - self.set_assign_role_success, - self.set_assign_role_success, - self.get_fabric_id_success, - self.config_save_switch_success, - self.config_deploy_switch_success] - - elif 'merge_multiple_brown_green_field_switch' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover_params, + self.mock_inv_discover107_params, + self.get_have_initial_success, + self.import_switch_discover_success, + self.import_switch_discover_success, + self.get_inventory_multiple_bf_switch_success, + self.get_inventory_multiple_bf_switch_success, + self.rediscover_switch_success, + self.rediscover_switch_success, + self.get_inventory_multiple_bf_switch_success, + self.get_inventory_multiple_bf_switch_success, + self.get_lan_multiple_new_bf_switch_cred_success, + self.set_lan_switch_cred_success, + self.set_lan_switch_cred_success, + self.get_inventory_multiple_bf_switch_success, + self.set_assign_role_success, + self.set_assign_role_success, + self.get_fabric_id_success, + self.config_save_switch_success, + self.config_deploy_switch_success, + ] + + elif "merge_multiple_brown_green_field_switch" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, - self.mock_inv_discover107_params, - self.get_have_initial_success, - self.import_switch_discover_success, - self.import_switch_discover_success, - self.get_inventory_multiple_bf_gf_switch_success, - self.get_inventory_multiple_bf_gf_switch_success, - self.rediscover_switch107_success, - self.rediscover_switch_success, - self.get_inventory_multiple_bf_gf_switch_success, - self.get_inventory_multiple_bf_gf_switch_success, - self.get_lan_multiple_new_bf_switch_cred_success, - self.set_lan_switch_cred_success, - self.set_lan_switch_cred_success, - self.get_inventory_multiple_bf_gf_switch_success, 
- self.set_assign_role_success, - self.set_assign_role_success, - self.get_fabric_id_success, - self.config_save_switch_success, - self.config_deploy_switch_success] - - elif 'delete_switch' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover_params, + self.mock_inv_discover107_params, + self.get_have_initial_success, + self.import_switch_discover_success, + self.import_switch_discover_success, + self.get_inventory_multiple_bf_gf_switch_success, + self.get_inventory_multiple_bf_gf_switch_success, + self.rediscover_switch107_success, + self.rediscover_switch_success, + self.get_inventory_multiple_bf_gf_switch_success, + self.get_inventory_multiple_bf_gf_switch_success, + self.get_lan_multiple_new_bf_switch_cred_success, + self.set_lan_switch_cred_success, + self.set_lan_switch_cred_success, + self.get_inventory_multiple_bf_gf_switch_success, + self.set_assign_role_success, + self.set_assign_role_success, + self.get_fabric_id_success, + self.config_save_switch_success, + self.config_deploy_switch_success, + ] + + elif "delete_switch" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.get_have_one_switch_success, - self.delete_switch_success] + self.run_dcnm_send.side_effect = [ + self.get_have_one_switch_success, + self.delete_switch_success, + ] - elif 'delete_multiple_switch' in self._testMethodName: + elif "delete_multiple_switch" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.get_have_multiple_switch_success, self.delete_switch109_success, - self.delete_switch_success] + self.run_dcnm_send.side_effect = [ + self.get_have_multiple_switch_success, + self.delete_switch109_success, + self.delete_switch_success, + ] - elif 'delete_all_switch' in self._testMethodName: + elif "delete_all_switch" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.get_have_null_config_switch_success, self.delete_switch_success] + 
self.run_dcnm_send.side_effect = [ + self.get_have_null_config_switch_success, + self.delete_switch_success, + ] - elif 'query_switch' in self._testMethodName: + elif "query_switch" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.get_have_one_switch_success, self.get_inventory_query_switch_success] + self.run_dcnm_send.side_effect = [ + self.get_have_one_switch_success, + self.get_inventory_query_switch_success, + ] - elif 'query_no_switch' in self._testMethodName: + elif "query_no_switch" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.get_have_one_switch_success, self.get_inventory_query_no_switch_success] + self.run_dcnm_send.side_effect = [ + self.get_have_one_switch_success, + self.get_inventory_query_no_switch_success, + ] - elif 'override_switch' in self._testMethodName: + elif "override_switch" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, - self.get_have_override_switch_success, - self.delete_switch107_success, - self.import_switch_discover_success, - self.get_inventory_override_switch_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.rediscover_switch_success, - self.get_inventory_override_switch_success, - self.get_inventory_override_switch_success, - self.get_lan_switch_override_cred_success, - self.set_lan_switch_cred_success, - self.get_inventory_override_switch_success, - self.set_assign_role_success, - self.get_fabric_id_success, - self.config_save_switch_success, - self.config_deploy_switch_success] - - elif 'migration_switch' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover_params, + self.get_have_override_switch_success, + self.delete_switch107_success, + self.import_switch_discover_success, + self.get_inventory_override_switch_success, + self.get_inventory_initial_switch_success, + 
self.get_inventory_initial_switch_success, + self.rediscover_switch_success, + self.get_inventory_override_switch_success, + self.get_inventory_override_switch_success, + self.get_lan_switch_override_cred_success, + self.set_lan_switch_cred_success, + self.get_inventory_override_switch_success, + self.set_assign_role_success, + self.get_fabric_id_success, + self.config_save_switch_success, + self.config_deploy_switch_success, + ] + + elif "migration_switch" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, self.get_have_migration_switch_success, - self.get_inventory_initial_switch_success, - self.set_assign_role_success, self.get_inventory_initial_switch_success, - self.get_fabric_id_success, - self.config_save_switch_success, self.config_deploy_switch_success] - - elif 'invalid_param_switch' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover_params, + self.get_have_migration_switch_success, + self.get_inventory_initial_switch_success, + self.set_assign_role_success, + self.get_inventory_initial_switch_success, + self.get_fabric_id_success, + self.config_save_switch_success, + self.config_deploy_switch_success, + ] + + elif "invalid_param_switch" in self._testMethodName: self.init_data() self.run_dcnm_send.side_effect = [] - elif 'have_initial_failure' in self._testMethodName: + elif "have_initial_failure" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, self.get_have_initial_failure] + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover_params, + self.get_have_initial_failure, + ] - elif 'import_switch_discover_failure' in self._testMethodName: + elif "import_switch_discover_failure" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, self.get_have_initial_success, - self.import_switch_discover_failure] + self.run_dcnm_send.side_effect = [ + 
self.mock_inv_discover_params, + self.get_have_initial_success, + self.import_switch_discover_failure, + ] - elif 'get_inventory_initial_switch_failure' in self._testMethodName: + elif "get_inventory_initial_switch_failure" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, self.get_have_initial_success, - self.import_switch_discover_success, - self.get_inventory_initial_switch_failure] - - elif 'rediscover_switch_failure' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover_params, + self.get_have_initial_success, + self.import_switch_discover_success, + self.get_inventory_initial_switch_failure, + ] + + elif "rediscover_switch_failure" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, - self.get_have_initial_success, - self.import_switch_discover_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.rediscover_switch_failure] - - elif 'get_lan_switch_cred_failure' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover_params, + self.get_have_initial_success, + self.import_switch_discover_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.rediscover_switch_failure, + ] + + elif "get_lan_switch_cred_failure" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, - self.get_have_initial_success, - self.import_switch_discover_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.rediscover_switch_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.get_lan_switch_cred_failure] - - elif 
'set_lan_switch_cred_failure' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover_params, + self.get_have_initial_success, + self.import_switch_discover_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.rediscover_switch_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.get_lan_switch_cred_failure, + ] + + elif "set_lan_switch_cred_failure" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, - self.get_have_initial_success, - self.import_switch_discover_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.rediscover_switch_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.get_lan_switch_cred_success, - self.set_lan_switch_cred_failure] - - elif 'set_assign_role_failure' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover_params, + self.get_have_initial_success, + self.import_switch_discover_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.rediscover_switch_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.get_lan_switch_cred_success, + self.set_lan_switch_cred_failure, + ] + + elif "set_assign_role_failure" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, - self.get_have_initial_success, - self.import_switch_discover_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.rediscover_switch_success, - 
self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.get_lan_switch_cred_success, - self.set_lan_switch_cred_success, - self.get_inventory_initial_switch_success, - self.set_assign_role_failure] - - elif 'get_fabric_id_failure' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover_params, + self.get_have_initial_success, + self.import_switch_discover_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.rediscover_switch_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.get_lan_switch_cred_success, + self.set_lan_switch_cred_success, + self.get_inventory_initial_switch_success, + self.set_assign_role_failure, + ] + + elif "get_fabric_id_failure" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, self.get_have_initial_success, - self.import_switch_discover_success, - self.get_inventory_initial_switch_success, - self.rediscover_switch_success, self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.get_lan_switch_cred_success, - self.set_lan_switch_cred_success, - self.get_inventory_initial_switch_success, - self.set_assign_role_success, self.get_fabric_id_failure] - - elif 'config_save_switch_failure' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover_params, + self.get_have_initial_success, + self.import_switch_discover_success, + self.get_inventory_initial_switch_success, + self.rediscover_switch_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.get_lan_switch_cred_success, + self.set_lan_switch_cred_success, + self.get_inventory_initial_switch_success, + self.set_assign_role_success, + self.get_fabric_id_failure, + ] + + elif "config_save_switch_failure" 
in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, - self.get_have_initial_success, - self.import_switch_discover_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.rediscover_switch_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.get_lan_switch_cred_success, - self.set_lan_switch_cred_success, - self.get_inventory_initial_switch_success, - self.set_assign_role_success, - self.get_fabric_id_success, - self.config_save_switch_failure] - - elif 'config_deploy_switch_failure' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover_params, + self.get_have_initial_success, + self.import_switch_discover_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.rediscover_switch_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.get_lan_switch_cred_success, + self.set_lan_switch_cred_success, + self.get_inventory_initial_switch_success, + self.set_assign_role_success, + self.get_fabric_id_success, + self.config_save_switch_failure, + ] + + elif "config_deploy_switch_failure" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, - self.get_have_initial_success, - self.import_switch_discover_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.rediscover_switch_success, - self.get_inventory_initial_switch_success, - self.get_inventory_initial_switch_success, - self.get_lan_switch_cred_success, - self.set_lan_switch_cred_success, - self.get_inventory_initial_switch_success, - self.set_assign_role_success, - self.get_fabric_id_success, - 
self.config_save_switch_success, - self.config_deploy_switch_failure] - - elif 'invalid_remove_switch' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover_params, + self.get_have_initial_success, + self.import_switch_discover_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.rediscover_switch_success, + self.get_inventory_initial_switch_success, + self.get_inventory_initial_switch_success, + self.get_lan_switch_cred_success, + self.set_lan_switch_cred_success, + self.get_inventory_initial_switch_success, + self.set_assign_role_success, + self.get_fabric_id_success, + self.config_save_switch_success, + self.config_deploy_switch_failure, + ] + + elif "invalid_remove_switch" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.get_have_one_switch_success, - self.invalid_remove_switch] + self.run_dcnm_send.side_effect = [ + self.get_have_one_switch_success, + self.invalid_remove_switch, + ] - elif 'blank_switch' in self._testMethodName: + elif "blank_switch" in self._testMethodName: self.init_data() self.run_dcnm_send.side_effect = [] - elif 'already_created_switch' in self._testMethodName: + elif "already_created_switch" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_inv_discover_params, self.get_have_already_created_switch_success] + self.run_dcnm_send.side_effect = [ + self.mock_inv_discover_params, + self.get_have_already_created_switch_success, + ] - elif 'already_deleted_switch' in self._testMethodName: + elif "already_deleted_switch" in self._testMethodName: self.init_data() self.run_dcnm_send.side_effect = [self.get_inventory_blank_success] @@ -462,251 +584,406 @@ def load_fixtures(self, response=None, device=''): pass def test_dcnm_inv_merge_switch_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', 
config=self.playbook_merge_switch_config)) + set_module_args( + dict( + state="merged", + fabric="kharicha-fabric", + config=self.playbook_merge_switch_config, + ) + ) result = self.execute_module(changed=True, failed=False) - for resp in result['response']: - self.assertEqual(resp['RETURN_CODE'], 200) - self.assertEqual(resp['MESSAGE'], 'OK') + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual(resp["MESSAGE"], "OK") def test_dcnm_inv_merge_role_switch_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_merge_role_switch_config)) + set_module_args( + dict( + state="merged", + fabric="kharicha-fabric", + config=self.playbook_merge_role_switch_config, + ) + ) result = self.execute_module(changed=True, failed=False) - for resp in result['response']: - self.assertEqual(resp['RETURN_CODE'], 200) - self.assertEqual(resp['MESSAGE'], 'OK') + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual(resp["MESSAGE"], "OK") def test_dcnm_check_inv_merge_switch_fabric(self): - set_module_args(dict(state='merged', _ansible_check_mode=True, - fabric='kharicha-fabric', config=self.playbook_merge_switch_config)) + set_module_args( + dict( + state="merged", + _ansible_check_mode=True, + fabric="kharicha-fabric", + config=self.playbook_merge_switch_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) - for resp in result['response']: - self.assertEqual(resp['RETURN_CODE'], 200) - self.assertEqual(resp['MESSAGE'], 'OK') + self.assertFalse(result.get("diff")) + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual(resp["MESSAGE"], "OK") def test_dcnm_inv_merge_brownfield_switch_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_merge_bf_switch_config)) + set_module_args( + dict( + state="merged", + 
fabric="kharicha-fabric", + config=self.playbook_merge_bf_switch_config, + ) + ) result = self.execute_module(changed=True, failed=False) - for resp in result['response']: - self.assertEqual(resp['RETURN_CODE'], 200) - self.assertEqual(resp['MESSAGE'], 'OK') + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual(resp["MESSAGE"], "OK") def test_dcnm_inv_merge_multiple_switch_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_merge_multiple_switch_config)) + set_module_args( + dict( + state="merged", + fabric="kharicha-fabric", + config=self.playbook_merge_multiple_switch_config, + ) + ) result = self.execute_module(changed=True, failed=False) - for resp in result['response']: - self.assertEqual(resp['RETURN_CODE'], 200) - self.assertEqual(resp['MESSAGE'], 'OK') + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual(resp["MESSAGE"], "OK") def test_dcnm_inv_merge_multiple_brownfield_switch_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_merge_bf_multiple_switch_config)) + set_module_args( + dict( + state="merged", + fabric="kharicha-fabric", + config=self.playbook_merge_bf_multiple_switch_config, + ) + ) result = self.execute_module(changed=True, failed=False) - for resp in result['response']: - self.assertEqual(resp['RETURN_CODE'], 200) - self.assertEqual(resp['MESSAGE'], 'OK') + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual(resp["MESSAGE"], "OK") def test_dcnm_inv_merge_multiple_brown_green_field_switch_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_merge_bf_gf_multiple_switch_config)) + set_module_args( + dict( + state="merged", + fabric="kharicha-fabric", + config=self.playbook_merge_bf_gf_multiple_switch_config, + ) + ) result = self.execute_module(changed=True, 
failed=False) - for resp in result['response']: - self.assertEqual(resp['RETURN_CODE'], 200) - self.assertEqual(resp['MESSAGE'], 'OK') + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual(resp["MESSAGE"], "OK") def test_dcnm_inv_override_switch_fabric(self): - set_module_args(dict(state='overridden', - fabric='kharicha-fabric', config=self.playbook_override_switch_config)) + set_module_args( + dict( + state="overridden", + fabric="kharicha-fabric", + config=self.playbook_override_switch_config, + ) + ) result = self.execute_module(changed=True, failed=False) - for resp in result['response']: - self.assertEqual(resp['RETURN_CODE'], 200) - self.assertEqual(resp['MESSAGE'], 'OK') + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual(resp["MESSAGE"], "OK") def test_dcnm_inv_migration_switch_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_merge_switch_config)) + set_module_args( + dict( + state="merged", + fabric="kharicha-fabric", + config=self.playbook_merge_switch_config, + ) + ) result = self.execute_module(changed=True, failed=False) - for resp in result['response']: - self.assertEqual(resp['RETURN_CODE'], 200) - self.assertEqual(resp['MESSAGE'], 'OK') + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual(resp["MESSAGE"], "OK") def test_dcnm_inv_delete_switch_fabric(self): - set_module_args(dict(state='deleted', - fabric='kharicha-fabric', config=self.playbook_delete_switch_config)) + set_module_args( + dict( + state="deleted", + fabric="kharicha-fabric", + config=self.playbook_delete_switch_config, + ) + ) result = self.execute_module(changed=True, failed=False) - for resp in result['response']: - self.assertEqual(resp['RETURN_CODE'], 200) - self.assertEqual(resp['MESSAGE'], 'OK') + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + 
self.assertEqual(resp["MESSAGE"], "OK") def test_dcnm_inv_delete_multiple_switch_fabric(self): - set_module_args(dict(state='deleted', - fabric='kharicha-fabric', config=self.playbook_delete_multiple_switch_config)) + set_module_args( + dict( + state="deleted", + fabric="kharicha-fabric", + config=self.playbook_delete_multiple_switch_config, + ) + ) result = self.execute_module(changed=True, failed=False) - for resp in result['response']: - self.assertEqual(resp['RETURN_CODE'], 200) - self.assertEqual(resp['MESSAGE'], 'OK') + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual(resp["MESSAGE"], "OK") def test_dcnm_inv_delete_all_switch_fabric(self): - set_module_args(dict(state='deleted', - fabric='kharicha-fabric')) + set_module_args(dict(state="deleted", fabric="kharicha-fabric")) result = self.execute_module(changed=True, failed=False) - for resp in result['response']: - self.assertEqual(resp['RETURN_CODE'], 200) - self.assertEqual(resp['MESSAGE'], 'OK') + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual(resp["MESSAGE"], "OK") def test_dcnm_inv_invalid_param_switch_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_invalid_param_config)) + set_module_args( + dict( + state="merged", + fabric="kharicha-fabric", + config=self.playbook_invalid_param_config, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result.get('msg'), 'Invalid parameters in playbook: password: : The string exceeds the allowed range of max 32 char') + self.assertEqual( + result.get("msg"), + "Invalid parameters in playbook: password: : The string exceeds the allowed range of max 32 char", + ) def test_dcnm_inv_have_initial_failure_switch_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_invalid_discover_payload_config)) + set_module_args( + dict( + state="merged", + 
fabric="kharicha-fabric", + config=self.playbook_invalid_discover_payload_config, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result.get('msg'), 'Unable to find inventories under fabric: kharicha-fabric') + self.assertEqual( + result.get("msg"), + "Unable to find inventories under fabric: kharicha-fabric", + ) def test_dcnm_inv_import_switch_discover_failure_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_merge_switch_config)) + set_module_args( + dict( + state="merged", + fabric="kharicha-fabric", + config=self.playbook_merge_switch_config, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result['msg']['DATA'], 'import switch discover failure') - self.assertEqual(result['msg']['MESSAGE'], 'Not OK') - self.assertEqual(result['msg']['RETURN_CODE'], 400) + self.assertEqual(result["msg"]["DATA"], "import switch discover failure") + self.assertEqual(result["msg"]["MESSAGE"], "Not OK") + self.assertEqual(result["msg"]["RETURN_CODE"], 400) def test_dcnm_inv_get_inventory_initial_switch_failure_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_merge_switch_config)) + set_module_args( + dict( + state="merged", + fabric="kharicha-fabric", + config=self.playbook_merge_switch_config, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result.get('msg'), 'Unable to find inventories under fabric: kharicha-fabric') + self.assertEqual( + result.get("msg"), + "Unable to find inventories under fabric: kharicha-fabric", + ) def test_dcnm_inv_rediscover_switch_failure_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_merge_switch_config)) + set_module_args( + dict( + state="merged", + fabric="kharicha-fabric", + config=self.playbook_merge_switch_config, + ) + ) result = self.execute_module(changed=False, failed=True) - 
self.assertEqual(result['msg']['DATA'], 'rediscover switch failure') - self.assertEqual(result['msg']['MESSAGE'], 'Not OK') - self.assertEqual(result['msg']['RETURN_CODE'], 400) + self.assertEqual(result["msg"]["DATA"], "rediscover switch failure") + self.assertEqual(result["msg"]["MESSAGE"], "Not OK") + self.assertEqual(result["msg"]["RETURN_CODE"], 400) def test_dcnm_inv_get_lan_switch_cred_failure_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_merge_switch_config)) + set_module_args( + dict( + state="merged", + fabric="kharicha-fabric", + config=self.playbook_merge_switch_config, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result.get('msg'), 'Unable to getLanSwitchCredentials under fabric: kharicha-fabric') + self.assertEqual( + result.get("msg"), + "Unable to getLanSwitchCredentials under fabric: kharicha-fabric", + ) def test_dcnm_inv_set_lan_switch_cred_failure_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_merge_switch_config)) + set_module_args( + dict( + state="merged", + fabric="kharicha-fabric", + config=self.playbook_merge_switch_config, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result['msg']['DATA'], 'set lan switch credentials failure') - self.assertEqual(result['msg']['MESSAGE'], 'Not OK') - self.assertEqual(result['msg']['RETURN_CODE'], 400) + self.assertEqual(result["msg"]["DATA"], "set lan switch credentials failure") + self.assertEqual(result["msg"]["MESSAGE"], "Not OK") + self.assertEqual(result["msg"]["RETURN_CODE"], 400) def test_dcnm_inv_set_assign_role_failure_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_merge_switch_config)) + set_module_args( + dict( + state="merged", + fabric="kharicha-fabric", + config=self.playbook_merge_switch_config, + ) + ) result = self.execute_module(changed=False, 
failed=True) - self.assertEqual(result['msg']['DATA'], 'set assign role failure') - self.assertEqual(result['msg']['MESSAGE'], 'Not OK') - self.assertEqual(result['msg']['RETURN_CODE'], 400) + self.assertEqual(result["msg"]["DATA"], "set assign role failure") + self.assertEqual(result["msg"]["MESSAGE"], "Not OK") + self.assertEqual(result["msg"]["RETURN_CODE"], 400) def test_dcnm_inv_config_save_switch_failure_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_merge_switch_config)) + set_module_args( + dict( + state="merged", + fabric="kharicha-fabric", + config=self.playbook_merge_switch_config, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result['msg']['DATA'], 'config save switch failure') - self.assertEqual(result['msg']['MESSAGE'], 'Not OK') - self.assertEqual(result['msg']['RETURN_CODE'], 400) + self.assertEqual(result["msg"]["DATA"], "config save switch failure") + self.assertEqual(result["msg"]["MESSAGE"], "Not OK") + self.assertEqual(result["msg"]["RETURN_CODE"], 400) def test_dcnm_inv_config_deploy_switch_failure_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_merge_switch_config)) + set_module_args( + dict( + state="merged", + fabric="kharicha-fabric", + config=self.playbook_merge_switch_config, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result['msg']['DATA'], 'config deploy switch failure') - self.assertEqual(result['msg']['MESSAGE'], 'Not OK') - self.assertEqual(result['msg']['RETURN_CODE'], 400) + self.assertEqual(result["msg"]["DATA"], "config deploy switch failure") + self.assertEqual(result["msg"]["MESSAGE"], "Not OK") + self.assertEqual(result["msg"]["RETURN_CODE"], 400) def test_dcnm_inv_invalid_remove_switch_fabric(self): - set_module_args(dict(state='deleted', - fabric='kharicha-fabric', config=self.playbook_delete_switch_config)) + set_module_args( + dict( + 
state="deleted", + fabric="kharicha-fabric", + config=self.playbook_delete_switch_config, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result['msg']['DATA'], 'Delete switch failed') - self.assertEqual(result['msg']['MESSAGE'], 'Not OK') - self.assertEqual(result['msg']['RETURN_CODE'], 400) + self.assertEqual(result["msg"]["DATA"], "Delete switch failed") + self.assertEqual(result["msg"]["MESSAGE"], "Not OK") + self.assertEqual(result["msg"]["RETURN_CODE"], 400) def test_dcnm_inv_blank_switch_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric')) + set_module_args(dict(state="merged", fabric="kharicha-fabric")) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result['msg'], 'config: element is mandatory for this state merged') + self.assertEqual( + result["msg"], "config: element is mandatory for this state merged" + ) def test_dcnm_inv_already_created_switch_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_merge_switch_config)) + set_module_args( + dict( + state="merged", + fabric="kharicha-fabric", + config=self.playbook_merge_switch_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(result['response'], 'The switch provided is already part of the fabric and cannot be created again') + self.assertEqual( + result["response"], + "The switch provided is already part of the fabric and cannot be created again", + ) def test_dcnm_inv_already_deleted_switch_fabric(self): - set_module_args(dict(state='deleted', - fabric='kharicha-fabric', config=self.playbook_merge_switch_config)) + set_module_args( + dict( + state="deleted", + fabric="kharicha-fabric", + config=self.playbook_merge_switch_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(result['response'], - 'The switch provided is not part of the fabric and cannot be deleted') + 
self.assertEqual( + result["response"], + "The switch provided is not part of the fabric and cannot be deleted", + ) def test_dcnm_inv_get_have_failure_fabric(self): - set_module_args(dict(state='merged', - fabric='kharicha-fabric', config=self.playbook_merge_switch_config)) + set_module_args( + dict( + state="merged", + fabric="kharicha-fabric", + config=self.playbook_merge_switch_config, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result['msg'], - 'Fabric kharicha-fabric not present on DCNM') + self.assertEqual(result["msg"], "Fabric kharicha-fabric not present on DCNM") def test_dcnm_inv_query_switch_fabric(self): - set_module_args(dict(state='query', - fabric='kharicha-fabric', config=self.playbook_query_switch_config)) + set_module_args( + dict( + state="query", + fabric="kharicha-fabric", + config=self.playbook_query_switch_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(result['response'][0]['ipAddress'], '192.168.1.110') - self.assertEqual(result['response'][0]['switchRole'], 'leaf') + self.assertEqual(result["response"][0]["ipAddress"], "192.168.1.110") + self.assertEqual(result["response"][0]["switchRole"], "leaf") def test_dcnm_inv_query_no_switch_fabric(self): - set_module_args(dict(state='query', - fabric='kharicha-fabric', config=self.playbook_query_switch_config)) + set_module_args( + dict( + state="query", + fabric="kharicha-fabric", + config=self.playbook_query_switch_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(result['response'], 'The queried switch is not part of the fabric configured') + self.assertEqual( + result["response"], + "The queried switch is not part of the fabric configured", + ) diff --git a/tests/unit/modules/dcnm/test_dcnm_policy.py b/tests/unit/modules/dcnm/test_dcnm_policy.py index 7ee70d5ea..0a3f922d6 100644 --- a/tests/unit/modules/dcnm/test_dcnm_policy.py +++ 
b/tests/unit/modules/dcnm/test_dcnm_policy.py @@ -1,6 +1,4 @@ -#!/usr/bin/python -# -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,7 +13,8 @@ # limitations under the License. # Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) +from __future__ import absolute_import, division, print_function + __metaclass__ = type from ansible_collections.ansible.netcommon.tests.unit.compat.mock import patch @@ -23,7 +22,9 @@ from ansible_collections.cisco.dcnm.plugins.modules import dcnm_policy from .dcnm_module import TestDcnmModule, set_module_args, loadPlaybookData -import json, copy +import json +import copy + class TestDcnmPolicyModule(TestDcnmModule): @@ -34,28 +35,36 @@ class TestDcnmPolicyModule(TestDcnmModule): def init_data(self): pass - def log_msg (self, msg): + def log_msg(self, msg): if fd is None: fd = open("policy-ut.log", "w+") - self.fd.write (msg) + self.fd.write(msg) self.fd.flush() def setUp(self): super(TestDcnmPolicyModule, self).setUp() - self.mock_dcnm_fabric_details = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_policy.get_fabric_inventory_details') + self.mock_dcnm_fabric_details = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_policy.get_fabric_inventory_details" + ) self.run_dcnm_fabric_details = self.mock_dcnm_fabric_details.start() - self.mock_dcnm_ip_sn = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_policy.get_ip_sn_dict') + self.mock_dcnm_ip_sn = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_policy.get_ip_sn_dict" + ) self.run_dcnm_ip_sn = self.mock_dcnm_ip_sn.start() - self.mock_dcnm_version_supported = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_policy.dcnm_version_supported') + self.mock_dcnm_version_supported = patch( + 
"ansible_collections.cisco.dcnm.plugins.modules.dcnm_policy.dcnm_version_supported" + ) self.run_dcnm_version_supported = self.mock_dcnm_version_supported.start() - self.mock_dcnm_send = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_policy.dcnm_send') - self.run_dcnm_send = self.mock_dcnm_send.start() + self.mock_dcnm_send = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_policy.dcnm_send" + ) + self.run_dcnm_send = self.mock_dcnm_send.start() def tearDown(self): @@ -63,318 +72,447 @@ def tearDown(self): self.mock_dcnm_send.stop() self.mock_dcnm_version_supported.stop() -#################################### FIXTURES ############################ + # -------------------------- FIXTURES -------------------------- - def load_policy_fixtures (self): + def load_policy_fixtures(self): - if ('test_dcnm_policy_merged_new' == self._testMethodName): + if "test_dcnm_policy_merged_new" == self._testMethodName: - create_succ_resp1 = self.payloads_data.get('success_create_response_101') - create_succ_resp2 = self.payloads_data.get('success_create_response_102') - create_succ_resp3 = self.payloads_data.get('success_create_response_103') - create_succ_resp4 = self.payloads_data.get('success_create_response_104') - create_succ_resp5 = self.payloads_data.get('success_create_response_105') - deploy_succ_resp = self.payloads_data.get('success_deploy_response_101_105') - have_all_resp = self.payloads_data.get('policy_have_all_resp') + create_succ_resp1 = self.payloads_data.get("success_create_response_101") + create_succ_resp2 = self.payloads_data.get("success_create_response_102") + create_succ_resp3 = self.payloads_data.get("success_create_response_103") + create_succ_resp4 = self.payloads_data.get("success_create_response_104") + create_succ_resp5 = self.payloads_data.get("success_create_response_105") + deploy_succ_resp = self.payloads_data.get("success_deploy_response_101_105") + have_all_resp = self.payloads_data.get("policy_have_all_resp") - 
self.run_dcnm_send.side_effect = [have_all_resp, - create_succ_resp1, create_succ_resp2, - create_succ_resp3, create_succ_resp4, - create_succ_resp5, - deploy_succ_resp] + self.run_dcnm_send.side_effect = [ + have_all_resp, + create_succ_resp1, + create_succ_resp2, + create_succ_resp3, + create_succ_resp4, + create_succ_resp5, + deploy_succ_resp, + ] + + if "test_dcnm_policy_merged_same_template" == self._testMethodName: + + have_101_105_resp = self.payloads_data.get("have_response_101_105") + create_succ_resp1 = self.payloads_data.get("success_create_response_101") + deploy_succ_resp = self.payloads_data.get( + "success_deploy_response_101_101_5" + ) - if ('test_dcnm_policy_merged_same_template' == self._testMethodName): + self.run_dcnm_send.side_effect = [ + have_101_105_resp, + create_succ_resp1, + create_succ_resp1, + create_succ_resp1, + create_succ_resp1, + create_succ_resp1, + deploy_succ_resp, + ] - have_101_105_resp = self.payloads_data.get('have_response_101_105') - create_succ_resp1 = self.payloads_data.get('success_create_response_101') - deploy_succ_resp = self.payloads_data.get('success_deploy_response_101_101_5') + if "test_dcnm_policy_merged_new_check_mode" == self._testMethodName: - self.run_dcnm_send.side_effect = [have_101_105_resp, - create_succ_resp1, create_succ_resp1, - create_succ_resp1, create_succ_resp1, - create_succ_resp1, - deploy_succ_resp] + have_all_resp = self.payloads_data.get("policy_have_all_resp") - if ('test_dcnm_policy_merged_new_check_mode' == self._testMethodName): + self.run_dcnm_send.side_effect = [have_all_resp] - have_all_resp = self.payloads_data.get('policy_have_all_resp') + if "test_dcnm_policy_merged_existing" == self._testMethodName: - self.run_dcnm_send.side_effect = [have_all_resp] + create_succ_resp1 = self.payloads_data.get("success_create_response_101") + create_succ_resp2 = self.payloads_data.get("success_create_response_102") + create_succ_resp3 = self.payloads_data.get("success_create_response_103") + 
create_succ_resp4 = self.payloads_data.get("success_create_response_104") + create_succ_resp5 = self.payloads_data.get("success_create_response_105") + deploy_succ_resp = self.payloads_data.get("success_deploy_response_101_105") + have_101_105_resp = self.payloads_data.get("have_response_101_105") + self.run_dcnm_send.side_effect = [have_101_105_resp, deploy_succ_resp] - if ('test_dcnm_policy_merged_existing' == self._testMethodName): + if "test_dcnm_policy_merged_existing_and_non_exist" == self._testMethodName: - create_succ_resp1 = self.payloads_data.get('success_create_response_101') - create_succ_resp2 = self.payloads_data.get('success_create_response_102') - create_succ_resp3 = self.payloads_data.get('success_create_response_103') - create_succ_resp4 = self.payloads_data.get('success_create_response_104') - create_succ_resp5 = self.payloads_data.get('success_create_response_105') - deploy_succ_resp = self.payloads_data.get('success_deploy_response_101_105') - have_101_105_resp = self.payloads_data.get('have_response_101_105') + create_succ_resp4 = self.payloads_data.get("success_create_response_104") + create_succ_resp5 = self.payloads_data.get("success_create_response_105") + deploy_succ_resp = self.payloads_data.get("success_deploy_response_101_105") + have_101_103_resp = self.payloads_data.get("have_response_101_103") - self.run_dcnm_send.side_effect = [have_101_105_resp, - deploy_succ_resp] + self.run_dcnm_send.side_effect = [ + have_101_103_resp, + create_succ_resp4, + create_succ_resp5, + deploy_succ_resp, + ] - if ('test_dcnm_policy_merged_existing_and_non_exist' == self._testMethodName): + if "test_dcnm_policy_without_state" == self._testMethodName: - create_succ_resp4 = self.payloads_data.get('success_create_response_104') - create_succ_resp5 = self.payloads_data.get('success_create_response_105') - deploy_succ_resp = self.payloads_data.get('success_deploy_response_101_105') - have_101_103_resp = self.payloads_data.get('have_response_101_103') + 
create_succ_resp4 = self.payloads_data.get("success_create_response_104") + create_succ_resp5 = self.payloads_data.get("success_create_response_105") + deploy_succ_resp = self.payloads_data.get("success_deploy_response_104_105") - self.run_dcnm_send.side_effect = [have_101_103_resp, - create_succ_resp4, create_succ_resp5, - deploy_succ_resp] + self.run_dcnm_send.side_effect = [ + [], + create_succ_resp4, + create_succ_resp5, + deploy_succ_resp, + ] + + if "test_dcnm_policy_merge_additional_policies" == self._testMethodName: + + create_succ_resp4 = self.payloads_data.get("success_create_response_104") + create_succ_resp4_1 = self.payloads_data.get( + "success_create_response_104_1" + ) + deploy_succ_resp = self.payloads_data.get( + "success_deploy_response_104_104_1" + ) - if ('test_dcnm_policy_without_state' == self._testMethodName): + self.run_dcnm_send.side_effect = [ + [], + create_succ_resp4, + create_succ_resp4_1, + deploy_succ_resp, + ] + + if "test_dcnm_policy_merge_additional_policies_exist" == self._testMethodName: + + have_resp_104 = self.payloads_data.get("have_response_104") + create_succ_resp4 = self.payloads_data.get("success_create_response_104") + create_succ_resp4_1 = self.payloads_data.get( + "success_create_response_104_1" + ) + deploy_succ_resp = self.payloads_data.get( + "success_deploy_response_104_104_1" + ) - create_succ_resp4 = self.payloads_data.get('success_create_response_104') - create_succ_resp5 = self.payloads_data.get('success_create_response_105') - deploy_succ_resp = self.payloads_data.get('success_deploy_response_104_105') + self.run_dcnm_send.side_effect = [ + have_resp_104, + create_succ_resp4, + create_succ_resp4_1, + deploy_succ_resp, + ] + + if "test_dcnm_policy_merge_multiple_switches" == self._testMethodName: + + create_succ_resp12 = self.payloads_data.get( + "success_create_response_101_sw2" + ) + create_succ_resp13 = self.payloads_data.get( + "success_create_response_101_sw3" + ) + create_succ_resp22 = 
self.payloads_data.get( + "success_create_response_102_sw2" + ) + create_succ_resp23 = self.payloads_data.get( + "success_create_response_102_sw3" + ) + create_succ_resp32 = self.payloads_data.get( + "success_create_response_103_sw2" + ) + create_succ_resp33 = self.payloads_data.get( + "success_create_response_103_sw3" + ) + create_succ_resp4 = self.payloads_data.get("success_create_response_104") + create_succ_resp5 = self.payloads_data.get("success_create_response_105") + deploy_succ_resp_multi_sw = self.payloads_data.get( + "success_deploy_response_101_105_multi_switch" + ) - self.run_dcnm_send.side_effect = [[], - create_succ_resp4, create_succ_resp5, - deploy_succ_resp] + self.run_dcnm_send.side_effect = [ + [], + create_succ_resp12, + create_succ_resp13, + create_succ_resp22, + create_succ_resp23, + create_succ_resp32, + create_succ_resp33, + create_succ_resp4, + create_succ_resp5, + deploy_succ_resp_multi_sw, + ] + + if "test_dcnm_policy_merge_deploy_false" == self._testMethodName: + + create_succ_resp4 = self.payloads_data.get("success_create_response_104") - if ('test_dcnm_policy_merge_additional_policies' == self._testMethodName): + self.run_dcnm_send.side_effect = [[], create_succ_resp4] - create_succ_resp4 = self.payloads_data.get('success_create_response_104') - create_succ_resp4_1 = self.payloads_data.get('success_create_response_104_1') - deploy_succ_resp = self.payloads_data.get('success_deploy_response_104_104_1') + if "test_dcnm_policy_merge_no_deploy" == self._testMethodName: - self.run_dcnm_send.side_effect = [[], - create_succ_resp4, create_succ_resp4_1, - deploy_succ_resp] + create_succ_resp1 = self.payloads_data.get("success_create_response_101") + create_succ_resp2 = self.payloads_data.get("success_create_response_102") + create_succ_resp3 = self.payloads_data.get("success_create_response_103") + create_succ_resp4 = self.payloads_data.get("success_create_response_104") + create_succ_resp5 = 
self.payloads_data.get("success_create_response_105") + deploy_succ_resp = self.payloads_data.get("success_deploy_response_101_105") + have_all_resp = self.payloads_data.get("policy_have_all_resp") - if ('test_dcnm_policy_merge_additional_policies_exist' == self._testMethodName): + self.run_dcnm_send.side_effect = [ + have_all_resp, + create_succ_resp1, + create_succ_resp2, + create_succ_resp3, + create_succ_resp4, + create_succ_resp5, + deploy_succ_resp, + ] + + if "test_dcnm_policy_merged_new_with_vars" == self._testMethodName: + + create_succ_resp1 = self.payloads_data.get("success_create_response_125") + create_succ_resp2 = self.payloads_data.get("success_create_response_126") + create_succ_resp3 = self.payloads_data.get("success_create_response_127") + deploy_succ_resp = self.payloads_data.get("success_deploy_response_125_127") + have_all_resp = self.payloads_data.get("policy_have_all_resp") - have_resp_104 = self.payloads_data.get('have_response_104') - create_succ_resp4 = self.payloads_data.get('success_create_response_104') - create_succ_resp4_1 = self.payloads_data.get('success_create_response_104_1') - deploy_succ_resp = self.payloads_data.get('success_deploy_response_104_104_1') + self.run_dcnm_send.side_effect = [ + have_all_resp, + create_succ_resp1, + create_succ_resp2, + create_succ_resp3, + deploy_succ_resp, + ] - self.run_dcnm_send.side_effect = [have_resp_104, - create_succ_resp4, create_succ_resp4_1, - deploy_succ_resp] + if "test_dcnm_policy_modify_with_template_name" == self._testMethodName: - if ('test_dcnm_policy_merge_multiple_switches' == self._testMethodName): + deploy_succ_resp = self.payloads_data.get("success_deploy_response_104") + have_all_resp = self.payloads_data.get("have_response_101_105") + create_succ_resp1 = self.payloads_data.get("success_create_response_101") - create_succ_resp12 = self.payloads_data.get('success_create_response_101_sw2') - create_succ_resp13 = self.payloads_data.get('success_create_response_101_sw3') - 
create_succ_resp22 = self.payloads_data.get('success_create_response_102_sw2') - create_succ_resp23 = self.payloads_data.get('success_create_response_102_sw3') - create_succ_resp32 = self.payloads_data.get('success_create_response_103_sw2') - create_succ_resp33 = self.payloads_data.get('success_create_response_103_sw3') - create_succ_resp4 = self.payloads_data.get('success_create_response_104') - create_succ_resp5 = self.payloads_data.get('success_create_response_105') - deploy_succ_resp_multi_sw = self.payloads_data.get('success_deploy_response_101_105_multi_switch') + self.run_dcnm_send.side_effect = [ + have_all_resp, + create_succ_resp1, + deploy_succ_resp, + ] - self.run_dcnm_send.side_effect = [[], - create_succ_resp12, create_succ_resp13, - create_succ_resp22, create_succ_resp23, - create_succ_resp32, create_succ_resp33, - create_succ_resp4, create_succ_resp5, - deploy_succ_resp_multi_sw] + if "test_dcnm_policy_modify_with_policy_id" == self._testMethodName: - if ('test_dcnm_policy_merge_deploy_false' == self._testMethodName): + create_succ_resp4 = self.payloads_data.get("success_create_response_104") + deploy_succ_resp = self.payloads_data.get("success_deploy_response_104") + get_response_104 = self.payloads_data.get("get_response_104") + have_all_resp = self.payloads_data.get("have_response_101_105") - create_succ_resp4 = self.payloads_data.get('success_create_response_104') + self.run_dcnm_send.side_effect = [ + get_response_104, + have_all_resp, + create_succ_resp4, + deploy_succ_resp, + ] - self.run_dcnm_send.side_effect = [[], create_succ_resp4] + if "test_dcnm_policy_modify_policy_with_vars" == self._testMethodName: - if ('test_dcnm_policy_merge_no_deploy' == self._testMethodName): - - create_succ_resp1 = self.payloads_data.get('success_create_response_101') - create_succ_resp2 = self.payloads_data.get('success_create_response_102') - create_succ_resp3 = self.payloads_data.get('success_create_response_103') - create_succ_resp4 = 
self.payloads_data.get('success_create_response_104') - create_succ_resp5 = self.payloads_data.get('success_create_response_105') - deploy_succ_resp = self.payloads_data.get('success_deploy_response_101_105') - have_all_resp = self.payloads_data.get('policy_have_all_resp') - - self.run_dcnm_send.side_effect = [have_all_resp, - create_succ_resp1, create_succ_resp2, - create_succ_resp3, create_succ_resp4, - create_succ_resp5, - deploy_succ_resp] - - if ('test_dcnm_policy_merged_new_with_vars' == self._testMethodName): - - create_succ_resp1 = self.payloads_data.get('success_create_response_125') - create_succ_resp2 = self.payloads_data.get('success_create_response_126') - create_succ_resp3 = self.payloads_data.get('success_create_response_127') - deploy_succ_resp = self.payloads_data.get('success_deploy_response_125_127') - have_all_resp = self.payloads_data.get('policy_have_all_resp') - - self.run_dcnm_send.side_effect = [have_all_resp, - create_succ_resp1, create_succ_resp2, - create_succ_resp3, - deploy_succ_resp] - - if ('test_dcnm_policy_modify_with_template_name' == self._testMethodName): - - deploy_succ_resp = self.payloads_data.get('success_deploy_response_104') - have_all_resp = self.payloads_data.get('have_response_101_105') - create_succ_resp1 = self.payloads_data.get('success_create_response_101') - - self.run_dcnm_send.side_effect = [have_all_resp, - create_succ_resp1, - deploy_succ_resp] - - if ('test_dcnm_policy_modify_with_policy_id' == self._testMethodName): - - create_succ_resp4 = self.payloads_data.get('success_create_response_104') - deploy_succ_resp = self.payloads_data.get('success_deploy_response_104') - get_response_104 = self.payloads_data.get('get_response_104') - have_all_resp = self.payloads_data.get('have_response_101_105') - - self.run_dcnm_send.side_effect = [get_response_104, have_all_resp, - create_succ_resp4, - deploy_succ_resp] - - if ('test_dcnm_policy_modify_policy_with_vars' == self._testMethodName): - - create_succ_resp1 = 
self.payloads_data.get('success_create_response_125') - deploy_succ_resp = self.payloads_data.get('success_deploy_response_125') - have_all_resp = self.payloads_data.get('have_response_125') - get_response_125 = self.payloads_data.get('get_response_125') - - self.run_dcnm_send.side_effect = [get_response_125, have_all_resp, - create_succ_resp1, - deploy_succ_resp] - - if ('test_dcnm_policy_delete_with_template_name' == self._testMethodName): - - have_resp_101_105 = self.payloads_data.get('have_response_101_105') - mark_delete_resp_101 = self.payloads_data.get('mark_delete_response_101') - mark_delete_resp_102 = self.payloads_data.get('mark_delete_response_102') - mark_delete_resp_103 = self.payloads_data.get('mark_delete_response_103') - mark_delete_resp_104 = self.payloads_data.get('mark_delete_response_104') - mark_delete_resp_105 = self.payloads_data.get('mark_delete_response_105') - delete_config_save_resp = self.payloads_data.get('delete_config_deploy_response_101_105') - config_preview = self.payloads_data.get('config_preview') - - self.run_dcnm_send.side_effect = [have_resp_101_105, - mark_delete_resp_101, mark_delete_resp_102, - mark_delete_resp_103, mark_delete_resp_104, - mark_delete_resp_105, delete_config_save_resp, - config_preview, - [], [], [], [], [], - ] - - if ('test_dcnm_policy_delete_with_policy_id' == self._testMethodName): - - get_response_101 = self.payloads_data.get('get_response_101') - get_response_102 = self.payloads_data.get('get_response_102') - get_response_103 = self.payloads_data.get('get_response_103') - get_response_104 = self.payloads_data.get('get_response_104') - get_response_105 = self.payloads_data.get('get_response_105') - have_resp_101_105 = self.payloads_data.get('have_response_101_105') - mark_delete_resp_101 = self.payloads_data.get('mark_delete_response_101') - mark_delete_resp_102 = self.payloads_data.get('mark_delete_response_102') - mark_delete_resp_103 = self.payloads_data.get('mark_delete_response_103') - 
mark_delete_resp_104 = self.payloads_data.get('mark_delete_response_104') - mark_delete_resp_105 = self.payloads_data.get('mark_delete_response_105') - delete_config_save_resp = self.payloads_data.get('delete_config_deploy_response_101_105') - config_preview = self.payloads_data.get('config_preview') + create_succ_resp1 = self.payloads_data.get("success_create_response_125") + deploy_succ_resp = self.payloads_data.get("success_deploy_response_125") + have_all_resp = self.payloads_data.get("have_response_125") + get_response_125 = self.payloads_data.get("get_response_125") self.run_dcnm_send.side_effect = [ - get_response_101, get_response_102, - get_response_103, get_response_104, - get_response_105, have_resp_101_105, - mark_delete_resp_101, mark_delete_resp_102, - mark_delete_resp_103, mark_delete_resp_104, - mark_delete_resp_105, delete_config_save_resp, - config_preview, - [], [], [], [], [], - ] - if ('test_dcnm_policy_delete_multiple_policies_with_template_name' == self._testMethodName): - - have_resp_101_105_multi = self.payloads_data.get('have_response_101_105_multi') - mark_delete_resp_101 = self.payloads_data.get('mark_delete_response_101') - mark_delete_resp_102 = self.payloads_data.get('mark_delete_response_102') - mark_delete_resp_103 = self.payloads_data.get('mark_delete_response_103') - mark_delete_resp_104 = self.payloads_data.get('mark_delete_response_104') - mark_delete_resp_105 = self.payloads_data.get('mark_delete_response_105') - delete_config_save_resp = self.payloads_data.get('delete_config_deploy_response_101_105') - config_preview = self.payloads_data.get('config_preview') - - self.run_dcnm_send.side_effect = [have_resp_101_105_multi, - mark_delete_resp_101, mark_delete_resp_101, - mark_delete_resp_101, mark_delete_resp_102, - mark_delete_resp_102, mark_delete_resp_103, - mark_delete_resp_104, mark_delete_resp_105, - delete_config_save_resp, - config_preview, - [], [], [], [], [], [], [], [], - ] - - - if 
('test_dcnm_policy_delete_with_template_name_with_second_delete' == self._testMethodName): - - have_resp_101_105 = self.payloads_data.get('have_response_101_105') - get_response_101 = self.payloads_data.get('get_response_101') - get_response_102 = self.payloads_data.get('get_response_102') - get_response_103 = self.payloads_data.get('get_response_103') - get_response_104 = self.payloads_data.get('get_response_104') - get_response_105 = self.payloads_data.get('get_response_105') - mark_delete_resp_101 = self.payloads_data.get('mark_delete_response_101') - mark_delete_resp_102 = self.payloads_data.get('mark_delete_response_102') - mark_delete_resp_103 = self.payloads_data.get('mark_delete_response_103') - mark_delete_resp_104 = self.payloads_data.get('mark_delete_response_104') - mark_delete_resp_105 = self.payloads_data.get('mark_delete_response_105') - delete_config_save_resp = self.payloads_data.get('delete_config_deploy_response_101_105') - config_preview = self.payloads_data.get('config_preview') - delete_resp_101 = self.payloads_data.get('delete_response_101') - delete_resp_102 = self.payloads_data.get('delete_response_102') - delete_resp_103 = self.payloads_data.get('delete_response_103') - delete_resp_104 = self.payloads_data.get('delete_response_104') - delete_resp_105 = self.payloads_data.get('delete_response_105') - - self.run_dcnm_send.side_effect = [have_resp_101_105, - mark_delete_resp_101, mark_delete_resp_102, - mark_delete_resp_103, mark_delete_resp_104, - mark_delete_resp_105, delete_config_save_resp, - config_preview, - get_response_101, delete_resp_101, - get_response_102, delete_resp_102, - get_response_103, delete_resp_103, - get_response_104, delete_resp_104, - get_response_105, delete_resp_105, - delete_config_save_resp - ] - - if ('test_dcnm_policy_query_with_switch_info' == self._testMethodName): - - have_resp_101_105 = self.payloads_data.get('have_response_101_105') - - self.run_dcnm_send.side_effect = [have_resp_101_105, - ] - if 
('test_dcnm_policy_query_with_policy_id' == self._testMethodName): - - get_resp_101 = self.payloads_data.get('get_response_101') - get_resp_102 = self.payloads_data.get('get_response_102') - get_resp_103 = self.payloads_data.get('get_response_103') - get_resp_104 = self.payloads_data.get('get_response_104') - get_resp_105 = self.payloads_data.get('get_response_105') - - self.run_dcnm_send.side_effect = [get_resp_101, - get_resp_102, get_resp_103, get_resp_104, - get_resp_105 - ] - if ('test_dcnm_policy_query_with_template_name' == self._testMethodName): - - have_resp_101_105 = self.payloads_data.get('have_response_101_105') - - self.run_dcnm_send.side_effect = [have_resp_101_105, - ] - if ('test_dcnm_policy_query_with_template_name_match_multi' == self._testMethodName): - - have_resp_101_105_multi = self.payloads_data.get('have_response_101_105_multi') - - self.run_dcnm_send.side_effect = [have_resp_101_105_multi, - ] - def load_fixtures(self, response=None, device=''): + get_response_125, + have_all_resp, + create_succ_resp1, + deploy_succ_resp, + ] + + if "test_dcnm_policy_delete_with_template_name" == self._testMethodName: + + have_resp_101_105 = self.payloads_data.get("have_response_101_105") + mark_delete_resp_101 = self.payloads_data.get("mark_delete_response_101") + mark_delete_resp_102 = self.payloads_data.get("mark_delete_response_102") + mark_delete_resp_103 = self.payloads_data.get("mark_delete_response_103") + mark_delete_resp_104 = self.payloads_data.get("mark_delete_response_104") + mark_delete_resp_105 = self.payloads_data.get("mark_delete_response_105") + delete_config_save_resp = self.payloads_data.get( + "delete_config_deploy_response_101_105" + ) + config_preview = self.payloads_data.get("config_preview") + + self.run_dcnm_send.side_effect = [ + have_resp_101_105, + mark_delete_resp_101, + mark_delete_resp_102, + mark_delete_resp_103, + mark_delete_resp_104, + mark_delete_resp_105, + delete_config_save_resp, + config_preview, + [], + [], + [], + 
[], + [], + ] + + if "test_dcnm_policy_delete_with_policy_id" == self._testMethodName: + + get_response_101 = self.payloads_data.get("get_response_101") + get_response_102 = self.payloads_data.get("get_response_102") + get_response_103 = self.payloads_data.get("get_response_103") + get_response_104 = self.payloads_data.get("get_response_104") + get_response_105 = self.payloads_data.get("get_response_105") + have_resp_101_105 = self.payloads_data.get("have_response_101_105") + mark_delete_resp_101 = self.payloads_data.get("mark_delete_response_101") + mark_delete_resp_102 = self.payloads_data.get("mark_delete_response_102") + mark_delete_resp_103 = self.payloads_data.get("mark_delete_response_103") + mark_delete_resp_104 = self.payloads_data.get("mark_delete_response_104") + mark_delete_resp_105 = self.payloads_data.get("mark_delete_response_105") + delete_config_save_resp = self.payloads_data.get( + "delete_config_deploy_response_101_105" + ) + config_preview = self.payloads_data.get("config_preview") + + self.run_dcnm_send.side_effect = [ + get_response_101, + get_response_102, + get_response_103, + get_response_104, + get_response_105, + have_resp_101_105, + mark_delete_resp_101, + mark_delete_resp_102, + mark_delete_resp_103, + mark_delete_resp_104, + mark_delete_resp_105, + delete_config_save_resp, + config_preview, + [], + [], + [], + [], + [], + ] + if ( + "test_dcnm_policy_delete_multiple_policies_with_template_name" + == self._testMethodName + ): + + have_resp_101_105_multi = self.payloads_data.get( + "have_response_101_105_multi" + ) + mark_delete_resp_101 = self.payloads_data.get("mark_delete_response_101") + mark_delete_resp_102 = self.payloads_data.get("mark_delete_response_102") + mark_delete_resp_103 = self.payloads_data.get("mark_delete_response_103") + mark_delete_resp_104 = self.payloads_data.get("mark_delete_response_104") + mark_delete_resp_105 = self.payloads_data.get("mark_delete_response_105") + delete_config_save_resp = 
self.payloads_data.get( + "delete_config_deploy_response_101_105" + ) + config_preview = self.payloads_data.get("config_preview") + + self.run_dcnm_send.side_effect = [ + have_resp_101_105_multi, + mark_delete_resp_101, + mark_delete_resp_101, + mark_delete_resp_101, + mark_delete_resp_102, + mark_delete_resp_102, + mark_delete_resp_103, + mark_delete_resp_104, + mark_delete_resp_105, + delete_config_save_resp, + config_preview, + [], + [], + [], + [], + [], + [], + [], + [], + ] + + if ( + "test_dcnm_policy_delete_with_template_name_with_second_delete" + == self._testMethodName + ): + + have_resp_101_105 = self.payloads_data.get("have_response_101_105") + get_response_101 = self.payloads_data.get("get_response_101") + get_response_102 = self.payloads_data.get("get_response_102") + get_response_103 = self.payloads_data.get("get_response_103") + get_response_104 = self.payloads_data.get("get_response_104") + get_response_105 = self.payloads_data.get("get_response_105") + mark_delete_resp_101 = self.payloads_data.get("mark_delete_response_101") + mark_delete_resp_102 = self.payloads_data.get("mark_delete_response_102") + mark_delete_resp_103 = self.payloads_data.get("mark_delete_response_103") + mark_delete_resp_104 = self.payloads_data.get("mark_delete_response_104") + mark_delete_resp_105 = self.payloads_data.get("mark_delete_response_105") + delete_config_save_resp = self.payloads_data.get( + "delete_config_deploy_response_101_105" + ) + config_preview = self.payloads_data.get("config_preview") + delete_resp_101 = self.payloads_data.get("delete_response_101") + delete_resp_102 = self.payloads_data.get("delete_response_102") + delete_resp_103 = self.payloads_data.get("delete_response_103") + delete_resp_104 = self.payloads_data.get("delete_response_104") + delete_resp_105 = self.payloads_data.get("delete_response_105") + + self.run_dcnm_send.side_effect = [ + have_resp_101_105, + mark_delete_resp_101, + mark_delete_resp_102, + mark_delete_resp_103, + 
mark_delete_resp_104, + mark_delete_resp_105, + delete_config_save_resp, + config_preview, + get_response_101, + delete_resp_101, + get_response_102, + delete_resp_102, + get_response_103, + delete_resp_103, + get_response_104, + delete_resp_104, + get_response_105, + delete_resp_105, + delete_config_save_resp, + ] + + if "test_dcnm_policy_query_with_switch_info" == self._testMethodName: + + have_resp_101_105 = self.payloads_data.get("have_response_101_105") + + self.run_dcnm_send.side_effect = [ + have_resp_101_105, + ] + if "test_dcnm_policy_query_with_policy_id" == self._testMethodName: + + get_resp_101 = self.payloads_data.get("get_response_101") + get_resp_102 = self.payloads_data.get("get_response_102") + get_resp_103 = self.payloads_data.get("get_response_103") + get_resp_104 = self.payloads_data.get("get_response_104") + get_resp_105 = self.payloads_data.get("get_response_105") + + self.run_dcnm_send.side_effect = [ + get_resp_101, + get_resp_102, + get_resp_103, + get_resp_104, + get_resp_105, + ] + if "test_dcnm_policy_query_with_template_name" == self._testMethodName: + + have_resp_101_105 = self.payloads_data.get("have_response_101_105") + + self.run_dcnm_send.side_effect = [ + have_resp_101_105, + ] + if ( + "test_dcnm_policy_query_with_template_name_match_multi" + == self._testMethodName + ): + + have_resp_101_105_multi = self.payloads_data.get( + "have_response_101_105_multi" + ) + + self.run_dcnm_send.side_effect = [ + have_resp_101_105_multi, + ] + + def load_fixtures(self, response=None, device=""): # setup the side effects self.run_dcnm_fabric_details.side_effect = [self.mock_fab_inv] @@ -382,818 +520,1041 @@ def load_fixtures(self, response=None, device=''): self.run_dcnm_version_supported.side_effect = [11] # Load policy related side-effects - self.load_policy_fixtures () + self.load_policy_fixtures() -#################################### FIXTURES END ############################ -#################################### TEST-CASES 
############################## + # -------------------------- FIXTURES END -------------------------- + # -------------------------- TEST-CASES -------------------------- - def test_dcnm_policy_merged_new (self): + def test_dcnm_policy_merged_new(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('create_policy_101_105') - - set_module_args(dict(state='merged', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_policy_101_105") + + set_module_args( + dict( + state="merged", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 5) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 5) + self.assertEqual(len(result["diff"][0]["merged"]), 5) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 5) - # Validate create and deploy responses + # Validate create and deploy responses count = 0 max_count = len(result["diff"][0]["merged"]) for resp in result["response"]: - if (count < max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("is created successfully" in 
resp["DATA"]["successList"][0]["message"]), True) - elif (count == max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual((len(resp["DATA"][0]["successPTIList"].split(",")) == 5), True) - count = count + 1 - - def test_dcnm_policy_merged_same_template (self): + if count < max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ( + "is created successfully" + in resp["DATA"]["successList"][0]["message"] + ), + True, + ) + elif count == max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + (len(resp["DATA"][0]["successPTIList"].split(",")) == 5), True + ) + count = count + 1 + + def test_dcnm_policy_merged_same_template(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('create_policy_101_101_5') - - set_module_args(dict(state='merged', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_policy_101_101_5") + + set_module_args( + dict( + state="merged", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 5) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 5) + self.assertEqual(len(result["diff"][0]["merged"]), 5) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + 
self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 5) # Validate create and deploy responses count = 0 max_count = len(result["diff"][0]["merged"]) for resp in result["response"]: - if (count < max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("is created successfully" in resp["DATA"]["successList"][0]["message"]), True) - elif (count == max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual((len(resp["DATA"][0]["successPTIList"].split(",")) == 5), True) - count = count + 1 - - def test_dcnm_policy_merged_new_check_mode (self): + if count < max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ( + "is created successfully" + in resp["DATA"]["successList"][0]["message"] + ), + True, + ) + elif count == max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + (len(resp["DATA"][0]["successPTIList"].split(",")) == 5), True + ) + count = count + 1 + + def test_dcnm_policy_merged_new_check_mode(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('create_policy_101_105') - - set_module_args(dict(state='merged', - deploy=True, - fabric='mmudigon', - _ansible_check_mode=True, - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_policy_101_105") + + set_module_args( + dict( + state="merged", + deploy=True, + fabric="mmudigon", + _ansible_check_mode=True, + config=self.playbook_config, + 
) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 5) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 5) - self.assertEqual(len(result["response"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 5) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 5) + self.assertEqual(len(result["response"]), 0) - def test_dcnm_policy_merged_existing (self): + def test_dcnm_policy_merged_existing(self): # Idempotence case # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('create_policy_101_105') - - set_module_args(dict(state='merged', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_policy_101_105") + + set_module_args( + dict( + state="merged", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 5) + 
self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 5) - # Validate create and deploy responses + # Validate create and deploy responses count = 0 max_count = len(result["diff"][0]["merged"]) for resp in result["response"]: - if (count < max_count): - count = count + 1 - continue - elif (count == max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual((len(resp["DATA"][0]["successPTIList"].split(",")) == 5), True) - count = count + 1 - - def test_dcnm_policy_merged_existing_and_non_exist (self): + if count < max_count: + count = count + 1 + continue + + if count == max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + (len(resp["DATA"][0]["successPTIList"].split(",")) == 5), True + ) + count = count + 1 + + def test_dcnm_policy_merged_existing_and_non_exist(self): # Idempotence case # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('create_policy_101_105') - - set_module_args(dict(state='merged', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_policy_101_105") + + set_module_args( + dict( + state="merged", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, 
failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 2) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 5) + self.assertEqual(len(result["diff"][0]["merged"]), 2) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 5) - # Validate create and deploy responses + # Validate create and deploy responses count = 0 max_count = len(result["diff"][0]["merged"]) for resp in result["response"]: - if (count < max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("is created successfully" in resp["DATA"]["successList"][0]["message"]), True) - elif (count == max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual((len(resp["DATA"][0]["successPTIList"].split(",")) == 5), True) - count = count + 1 - - def test_dcnm_policy_without_state (self): + if count < max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ( + "is created successfully" + in resp["DATA"]["successList"][0]["message"] + ), + True, + ) + elif count == max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + (len(resp["DATA"][0]["successPTIList"].split(",")) == 5), True + ) + count = count + 1 + + def test_dcnm_policy_without_state(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required 
config data - self.playbook_config = self.config_data.get('create_policy_without_state_104_105') + self.playbook_config = self.config_data.get( + "create_policy_without_state_104_105" + ) - set_module_args(dict(deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + set_module_args( + dict(deploy=True, fabric="mmudigon", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 2) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 2) + self.assertEqual(len(result["diff"][0]["merged"]), 2) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 2) - # Validate create and deploy responses + # Validate create and deploy responses count = 0 max_count = len(result["diff"][0]["merged"]) for resp in result["response"]: - if (count < max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("is created successfully" in resp["DATA"]["successList"][0]["message"]), True) - elif (count == max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual((len(resp["DATA"][0]["successPTIList"].split(",")) == 2), True) - count = count + 1 - - def test_dcnm_policy_merge_additional_policies (self): + if count < max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ( + "is created successfully" + in resp["DATA"]["successList"][0]["message"] + ), + True, + ) + elif count == max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + (len(resp["DATA"][0]["successPTIList"].split(",")) == 2), True + ) + count = count + 1 + + def test_dcnm_policy_merge_additional_policies(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = 
loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('create_policy_additional_flags_104') - - set_module_args(dict(state='merged', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "create_policy_additional_flags_104" + ) + + set_module_args( + dict( + state="merged", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 2) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 2) + self.assertEqual(len(result["diff"][0]["merged"]), 2) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 2) - # Validate create and deploy responses + # Validate create and deploy responses count = 0 max_count = len(result["diff"][0]["merged"]) for resp in result["response"]: - if (count < max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("is created successfully" in resp["DATA"]["successList"][0]["message"]), True) - elif (count == max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual((len(resp["DATA"][0]["successPTIList"].split(",")) == 2), True) - count = count + 1 - - def test_dcnm_policy_merge_additional_policies_exist (self): + if count < max_count: + 
self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ( + "is created successfully" + in resp["DATA"]["successList"][0]["message"] + ), + True, + ) + elif count == max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + (len(resp["DATA"][0]["successPTIList"].split(",")) == 2), True + ) + count = count + 1 + + def test_dcnm_policy_merge_additional_policies_exist(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('create_policy_additional_flags_104') - - set_module_args(dict(state='merged', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "create_policy_additional_flags_104" + ) + + set_module_args( + dict( + state="merged", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 2) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 2) + self.assertEqual(len(result["diff"][0]["merged"]), 2) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 2) - # Validate create and deploy responses + # Validate create and deploy responses count = 0 max_count = 
len(result["diff"][0]["merged"]) for resp in result["response"]: - if (count < max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("is created successfully" in resp["DATA"]["successList"][0]["message"]), True) - elif (count == max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual((len(resp["DATA"][0]["successPTIList"].split(",")) == 2), True) - count = count + 1 - - def test_dcnm_policy_merge_multiple_switches (self): + if count < max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ( + "is created successfully" + in resp["DATA"]["successList"][0]["message"] + ), + True, + ) + elif count == max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + (len(resp["DATA"][0]["successPTIList"].split(",")) == 2), True + ) + count = count + 1 + + def test_dcnm_policy_merge_multiple_switches(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('create_policy_multi_switch_101_105') - - set_module_args(dict(state='merged', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "create_policy_multi_switch_101_105" + ) + + set_module_args( + dict( + state="merged", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 8) - 
self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 8) + self.assertEqual(len(result["diff"][0]["merged"]), 8) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 8) - # Validate create and deploy responses + # Validate create and deploy responses count = 0 max_count = len(result["diff"][0]["merged"]) for resp in result["response"]: - if (count < max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("is created successfully" in resp["DATA"]["successList"][0]["message"]), True) - elif (count == max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual((len(resp["DATA"][0]["successPTIList"].split(",")) == 2), True) - self.assertEqual((len(resp["DATA"][1]["successPTIList"].split(",")) == 3), True) - self.assertEqual((len(resp["DATA"][2]["successPTIList"].split(",")) == 3), True) - count = count + 1 - - def test_dcnm_policy_merge_no_deploy (self): + if count < max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ( + "is created successfully" + in resp["DATA"]["successList"][0]["message"] + ), + True, + ) + elif count == max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + (len(resp["DATA"][0]["successPTIList"].split(",")) == 2), True + ) + self.assertEqual( + (len(resp["DATA"][1]["successPTIList"].split(",")) == 3), True + ) + self.assertEqual( + (len(resp["DATA"][2]["successPTIList"].split(",")) == 3), True + ) + count = count + 1 + + def test_dcnm_policy_merge_no_deploy(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - 
# get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('create_policy_101_105') + self.playbook_config = self.config_data.get("create_policy_101_105") - set_module_args(dict(state='merged', - fabric='mmudigon', - config=self.playbook_config)) + set_module_args( + dict(state="merged", fabric="mmudigon", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 5) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 5) + self.assertEqual(len(result["diff"][0]["merged"]), 5) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 5) - # Validate create and deploy responses + # Validate create and deploy responses count = 0 max_count = len(result["diff"][0]["merged"]) for resp in result["response"]: - if (count < max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("is created successfully" in resp["DATA"]["successList"][0]["message"]), True) - elif (count == max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual((len(resp["DATA"][0]["successPTIList"].split(",")) == 5), True) - count = count + 1 - - def test_dcnm_policy_merge_deploy_false (self): + if count < max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ( + "is created successfully" + in resp["DATA"]["successList"][0]["message"] + ), + True, + ) + elif count == max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + 
(len(resp["DATA"][0]["successPTIList"].split(",")) == 5), True + ) + count = count + 1 + + def test_dcnm_policy_merge_deploy_false(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('create_policy_no_deploy_104') - - set_module_args(dict(state='merged', - deploy=False, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_policy_no_deploy_104") + + set_module_args( + dict( + state="merged", + deploy=False, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 1) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 1) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses count = 0 max_count = len(result["diff"][0]["merged"]) for resp in result["response"]: - if (count < max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("is created successfully" in resp["DATA"]["successList"][0]["message"]), True) - elif (count == max_count): - self.assertEqual((count < 
max_count), True) - count = count + 1 - - def test_dcnm_policy_merged_new_with_vars (self): + if count < max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ( + "is created successfully" + in resp["DATA"]["successList"][0]["message"] + ), + True, + ) + elif count == max_count: + self.assertEqual((count < max_count), True) + count = count + 1 + + def test_dcnm_policy_merged_new_with_vars(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('create_policy_125_127_with_vars') - - set_module_args(dict(state='merged', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_policy_125_127_with_vars") + + set_module_args( + dict( + state="merged", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 3) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 3) + self.assertEqual(len(result["diff"][0]["merged"]), 3) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 3) - # Validate create and deploy responses + # Validate create and deploy responses count = 0 max_count = 
len(result["diff"][0]["merged"]) for resp in result["response"]: - if (count < max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("is created successfully" in resp["DATA"]["successList"][0]["message"]), True) - elif (count == max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual((len(resp["DATA"][0]["successPTIList"].split(",")) == 3), True) - count = count + 1 - - def test_dcnm_policy_modify_with_template_name (self): + if count < max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ( + "is created successfully" + in resp["DATA"]["successList"][0]["message"] + ), + True, + ) + elif count == max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + (len(resp["DATA"][0]["successPTIList"].split(",")) == 3), True + ) + count = count + 1 + + def test_dcnm_policy_modify_with_template_name(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('modify_policy_104_with_template_name') - - set_module_args(dict(state='merged', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "modify_policy_104_with_template_name" + ) + + set_module_args( + dict( + state="merged", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 1) - 
self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 1) + self.assertEqual(len(result["diff"][0]["merged"]), 1) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 1) - # Validate create and deploy responses + # Validate create and deploy responses count = 0 max_count = len(result["diff"][0]["merged"]) for resp in result["response"]: - if (count < max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("is created successfully" in resp["DATA"]["successList"][0]["message"]), True) - elif (count == max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual((len(resp["DATA"][0]["successPTIList"].split(",")) == 1), True) - count = count + 1 - - def test_dcnm_policy_modify_with_policy_id (self): + if count < max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ( + "is created successfully" + in resp["DATA"]["successList"][0]["message"] + ), + True, + ) + elif count == max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + (len(resp["DATA"][0]["successPTIList"].split(",")) == 1), True + ) + count = count + 1 + + def test_dcnm_policy_modify_with_policy_id(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = 
self.config_data.get('modify_policy_104_with_policy_id') - - set_module_args(dict(state='merged', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("modify_policy_104_with_policy_id") + + set_module_args( + dict( + state="merged", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 1) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 1) - self.assertEqual(len(result["diff"][0]["skipped"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 1) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 1) + self.assertEqual(len(result["diff"][0]["skipped"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses count = 0 max_count = len(result["diff"][0]["merged"]) for resp in result["response"]: - if (count < max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("is created successfully" in resp["DATA"]["successList"][0]["message"]), True) - elif (count == max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual((len(resp["DATA"][0]["successPTIList"].split(",")) == 1), True) - count = count + 1 - - def test_dcnm_policy_modify_policy_with_vars (self): + if count < max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ( + "is created successfully" + in resp["DATA"]["successList"][0]["message"] + ), + True, + ) + elif count == max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + (len(resp["DATA"][0]["successPTIList"].split(",")) == 1), True + ) + count = count + 1 + + def test_dcnm_policy_modify_policy_with_vars(self): # load the 
json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('modify_policy_125_with_vars') - - set_module_args(dict(state='merged', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("modify_policy_125_with_vars") + + set_module_args( + dict( + state="merged", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 1) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 1) - self.assertEqual(len(result["diff"][0]["skipped"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 1) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 1) + self.assertEqual(len(result["diff"][0]["skipped"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses count = 0 max_count = len(result["diff"][0]["merged"]) for resp in result["response"]: - if (count < max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("is created successfully" in resp["DATA"]["successList"][0]["message"]), True) - elif (count == max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - 
self.assertEqual((len(resp["DATA"][0]["successPTIList"].split(",")) == 1), True) - count = count + 1 - - def test_dcnm_policy_delete_with_template_name (self): + if count < max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ( + "is created successfully" + in resp["DATA"]["successList"][0]["message"] + ), + True, + ) + elif count == max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + (len(resp["DATA"][0]["successPTIList"].split(",")) == 1), True + ) + count = count + 1 + + def test_dcnm_policy_delete_with_template_name(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('delete_policy_template_name_101_105') - - set_module_args(dict(state='deleted', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "delete_policy_template_name_101_105" + ) + + set_module_args( + dict( + state="deleted", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 5) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) - self.assertEqual(len(result["diff"][0]["skipped"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + 
self.assertEqual(len(result["diff"][0]["deleted"]), 5) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) + self.assertEqual(len(result["diff"][0]["skipped"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses count = 0 max_count = len(result["diff"][0]["deleted"]) for resp in result["response"]: - if (count < max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual((resp["DATA"]["deleted"] == True), True) - elif (count == max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("Config deployment has been triggered" in resp["DATA"]["status"]), True) - else: - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("Deleted successfully" in resp["DATA"]["message"]), True) - count = count + 1 - - def test_dcnm_policy_delete_with_policy_id (self): + if count < max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertTrue(resp["DATA"]["deleted"]) + # MGW: self.assertEqual((resp["DATA"]["deleted"] == True), True) + elif count == max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ("Config deployment has been triggered" in resp["DATA"]["status"]), + True, + ) + else: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ("Deleted successfully" in resp["DATA"]["message"]), True + ) + count = count + 1 + + def test_dcnm_policy_delete_with_policy_id(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = 
self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('delete_policy_policy_id_101_105') - - set_module_args(dict(state='deleted', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("delete_policy_policy_id_101_105") + + set_module_args( + dict( + state="deleted", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 5) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) - self.assertEqual(len(result["diff"][0]["skipped"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 5) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) + self.assertEqual(len(result["diff"][0]["skipped"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses count = 0 max_count = len(result["diff"][0]["deleted"]) for resp in result["response"]: - if (count < max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual((resp["DATA"]["deleted"] == True), True) - elif (count == max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("Config deployment has been triggered" in resp["DATA"]["status"]), True) - else: - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("Deleted successfully" in resp["DATA"]["message"]), True) - count = count + 1 - - def test_dcnm_policy_delete_multiple_policies_with_template_name (self): + if count < max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertTrue(resp["DATA"]["deleted"]) + # MGW: self.assertEqual((resp["DATA"]["deleted"] == True), True) + elif count == max_count: + 
self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ("Config deployment has been triggered" in resp["DATA"]["status"]), + True, + ) + else: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ("Deleted successfully" in resp["DATA"]["message"]), True + ) + count = count + 1 + + def test_dcnm_policy_delete_multiple_policies_with_template_name(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('delete_policy_template_name_multi') - - set_module_args(dict(state='deleted', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("delete_policy_template_name_multi") + + set_module_args( + dict( + state="deleted", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 8) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) - self.assertEqual(len(result["diff"][0]["skipped"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 8) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) + self.assertEqual(len(result["diff"][0]["skipped"]), 0) - # Validate create and 
deploy responses + # Validate create and deploy responses count = 0 max_count = len(result["diff"][0]["deleted"]) for resp in result["response"]: - if (count < max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual((resp["DATA"]["deleted"] == True), True) - elif (count == max_count): - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("Config deployment has been triggered" in resp["DATA"]["status"]), True) - else: - self.assertEqual(resp["RETURN_CODE"], 200) - self.assertEqual(("Deleted successfully" in resp["DATA"]["message"]), True) - count = count + 1 - - - def test_dcnm_policy_delete_with_template_name_with_second_delete (self): + if count < max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertTrue(resp["DATA"]["deleted"]) + # MGW: self.assertEqual((resp["DATA"]["deleted"] == True), True) + elif count == max_count: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ("Config deployment has been triggered" in resp["DATA"]["status"]), + True, + ) + else: + self.assertEqual(resp["RETURN_CODE"], 200) + self.assertEqual( + ("Deleted successfully" in resp["DATA"]["message"]), True + ) + count = count + 1 + + def test_dcnm_policy_delete_with_template_name_with_second_delete(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('delete_policy_template_name_101_105') - - set_module_args(dict(state='deleted', - deploy=True, - fabric='mmudigon', - 
config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "delete_policy_template_name_101_105" + ) + + set_module_args( + dict( + state="deleted", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 5) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) - self.assertEqual(len(result["diff"][0]["skipped"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 5) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) + self.assertEqual(len(result["diff"][0]["skipped"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses max_count = len(result["diff"][0]["deleted"]) for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_policy_query_with_switch_info (self): + def test_dcnm_policy_query_with_switch_info(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('query_policy_with_switch_info') - - set_module_args(dict(state='query', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = 
self.config_data.get("query_policy_with_switch_info") + + set_module_args( + dict( + state="query", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 5) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) - self.assertEqual(len(result["diff"][0]["skipped"]) , 0) - self.assertEqual((len(result["response"]) == 5) , True) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 5) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) + self.assertEqual(len(result["diff"][0]["skipped"]), 0) + self.assertEqual((len(result["response"]) == 5), True) - def test_dcnm_policy_query_with_policy_id (self): + def test_dcnm_policy_query_with_policy_id(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('query_policy_with_policy_id') - - set_module_args(dict(state='query', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("query_policy_with_policy_id") + + set_module_args( + dict( + state="query", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = 
self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 5) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) - self.assertEqual(len(result["diff"][0]["skipped"]) , 0) - self.assertEqual((len(result["response"]) == 5) , True) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 5) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) + self.assertEqual(len(result["diff"][0]["skipped"]), 0) + self.assertEqual((len(result["response"]) == 5), True) - def test_dcnm_policy_query_with_template_name (self): + def test_dcnm_policy_query_with_template_name(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('query_policy_with_template_name') - - set_module_args(dict(state='query', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("query_policy_with_template_name") + + set_module_args( + dict( + state="query", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - 
self.assertEqual(len(result["diff"][0]["query"]) , 5) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) - self.assertEqual(len(result["diff"][0]["skipped"]) , 0) - self.assertEqual((len(result["response"]) == 5) , True) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 5) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) + self.assertEqual(len(result["diff"][0]["skipped"]), 0) + self.assertEqual((len(result["response"]) == 5), True) - def test_dcnm_policy_query_with_template_name_match_multi (self): + def test_dcnm_policy_query_with_template_name_match_multi(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('query_policy_with_template_name') - - set_module_args(dict(state='query', - deploy=True, - fabric='mmudigon', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("query_policy_with_template_name") + + set_module_args( + dict( + state="query", + deploy=True, + fabric="mmudigon", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 5) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) - self.assertEqual(len(result["diff"][0]["skipped"]) 
, 0) - self.assertEqual((len(result["response"]) == 8) , True) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 5) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) + self.assertEqual(len(result["diff"][0]["skipped"]), 0) + self.assertEqual((len(result["response"]) == 8), True) def test_dcnm_policy_wrong_state(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_policy_payloads') + self.config_data = loadPlaybookData("dcnm_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_policy_payloads") - # get mock ip_sn and fabric_inventory_details - self.mock_fab_inv = [] - self.mock_ip_sn = self.payloads_data.get('mock_ip_sn') + # get mock ip_sn and fabric_inventory_details + self.mock_fab_inv = [] + self.mock_ip_sn = self.payloads_data.get("mock_ip_sn") # load required config data - self.playbook_config = self.config_data.get('create_policy_wrong_state_104') + self.playbook_config = self.config_data.get("create_policy_wrong_state_104") - set_module_args(dict(state='replaced', - fabric='mmudigon', - config=self.playbook_config)) + set_module_args( + dict(state="replaced", fabric="mmudigon", config=self.playbook_config) + ) result = None - try: + try: result = self.execute_module(changed=False, failed=False) - except: - self.assertEqual (result, None) - + except Exception: + self.assertEqual(result, None) diff --git a/tests/unit/modules/dcnm/test_dcnm_service_node.py b/tests/unit/modules/dcnm/test_dcnm_service_node.py index f33048f9a..8a0351c6c 100644 --- a/tests/unit/modules/dcnm/test_dcnm_service_node.py +++ b/tests/unit/modules/dcnm/test_dcnm_service_node.py @@ -1,6 +1,4 @@ -#!/usr/bin/python -# -# Copyright (c) 2021 Cisco and/or its affiliates. +# Copyright (c) 2021-2022 Cisco and/or its affiliates. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,7 +13,8 @@ # limitations under the License. # Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) +from __future__ import absolute_import, division, print_function + __metaclass__ = type from ansible_collections.ansible.netcommon.tests.unit.compat.mock import patch @@ -23,72 +22,100 @@ from ansible_collections.cisco.dcnm.plugins.modules import dcnm_service_node from .dcnm_module import TestDcnmModule, set_module_args, loadPlaybookData -import json, copy +import json +import copy __copyright__ = "Copyright (c) 2021 Cisco and/or its affiliates." __author__ = "Karthik Babu Harichandra Babu" + class TestDcnmServiceNodeModule(TestDcnmModule): module = dcnm_service_node - test_data = loadPlaybookData('dcnm_service_node') + test_data = loadPlaybookData("dcnm_service_node") SUCCESS_RETURN_CODE = 200 - mock_ip_sn = test_data.get('mock_ip_sn') - sn_inv_data = test_data.get('sn_inv_data') - playbook_config = test_data.get('playbook_config') - playbook_config_replace_new = test_data.get('playbook_config_replace_new') - playbook_config_replace_new1 = test_data.get('playbook_config_replace_new1') - playbook_new_config = test_data.get('playbook_new_config') - playbook_config_virtual = test_data.get('playbook_config_virtual') - playbook_config_load = test_data.get('playbook_config_load') - playbook_config_vnf = test_data.get('playbook_config_vnf') - playbook_config_vpc = test_data.get('playbook_config_vpc') - playbook_config_invalid_vpc = test_data.get('playbook_config_invalid_vpc') - playbook_config_no_params = test_data.get('playbook_config_no_params') - playbook_config_no_type = test_data.get('playbook_config_no_type') - playbook_config_no_ff = test_data.get('playbook_config_no_ff') - playbook_config_no_vpc = test_data.get('playbook_config_no_vpc') - playbook_config_more_switch = 
test_data.get('playbook_config_more_switch') - playbook_config_name = test_data.get('playbook_config_name') - playbook_over_config = test_data.get('playbook_over_config') - playbook_config_query = test_data.get('playbook_config_query') - get_have_failure = test_data.get('get_have_failure') - blank_data = test_data.get('blank_data') - blank_data_null = test_data.get('blank_data_null') - blank_get_data = test_data.get('blank_get_data') - error1 = test_data.get('error1') - sn_delete_success_resp = test_data.get('sn_delete_success_resp') - sn_query_success_resp = test_data.get('sn_query_success_resp') + mock_ip_sn = test_data.get("mock_ip_sn") + sn_inv_data = test_data.get("sn_inv_data") + playbook_config = test_data.get("playbook_config") + playbook_config_replace_new = test_data.get("playbook_config_replace_new") + playbook_config_replace_new1 = test_data.get("playbook_config_replace_new1") + playbook_new_config = test_data.get("playbook_new_config") + playbook_config_virtual = test_data.get("playbook_config_virtual") + playbook_config_load = test_data.get("playbook_config_load") + playbook_config_vnf = test_data.get("playbook_config_vnf") + playbook_config_vpc = test_data.get("playbook_config_vpc") + playbook_config_invalid_vpc = test_data.get("playbook_config_invalid_vpc") + playbook_config_no_params = test_data.get("playbook_config_no_params") + playbook_config_no_type = test_data.get("playbook_config_no_type") + playbook_config_no_ff = test_data.get("playbook_config_no_ff") + playbook_config_no_vpc = test_data.get("playbook_config_no_vpc") + playbook_config_more_switch = test_data.get("playbook_config_more_switch") + playbook_config_name = test_data.get("playbook_config_name") + playbook_over_config = test_data.get("playbook_over_config") + playbook_config_query = test_data.get("playbook_config_query") + get_have_failure = test_data.get("get_have_failure") + blank_data = test_data.get("blank_data") + blank_data_null = test_data.get("blank_data_null") + 
blank_get_data = test_data.get("blank_get_data") + error1 = test_data.get("error1") + sn_delete_success_resp = test_data.get("sn_delete_success_resp") + sn_query_success_resp = test_data.get("sn_query_success_resp") def init_data(self): # Some of the mock data is re-initialized after each test as previous test might have altered portions # of the mock data. - self.mock_sn_1_object = copy.deepcopy(self.test_data.get('mock_sn_1_object')) - self.mock_sn_merge_1_success = copy.deepcopy(self.test_data.get('mock_sn_merge_1_success')) - self.mock_sn_merge_2_success = copy.deepcopy(self.test_data.get('mock_sn_merge_2_success')) - self.mock_sn_merge_3_success = copy.deepcopy(self.test_data.get('mock_sn_merge_3_success')) - self.mock_sn_merge_4_success = copy.deepcopy(self.test_data.get('mock_sn_merge_4_success')) - self.mock_sn_merge_5_success = copy.deepcopy(self.test_data.get('mock_sn_merge_5_success')) - self.mock_sn_merge_6_success = copy.deepcopy(self.test_data.get('mock_sn_merge_6_success')) - self.mock_sn_replace_1_success = copy.deepcopy(self.test_data.get('mock_sn_replace_1_success')) - self.mock_sn_replace_2_success = copy.deepcopy(self.test_data.get('mock_sn_replace_2_success')) - self.mock_sn_have_success = copy.deepcopy(self.test_data.get('mock_sn_have_success')) - self.mock_sn_query_success = copy.deepcopy(self.test_data.get('mock_sn_query_success')) + self.mock_sn_1_object = copy.deepcopy(self.test_data.get("mock_sn_1_object")) + self.mock_sn_merge_1_success = copy.deepcopy( + self.test_data.get("mock_sn_merge_1_success") + ) + self.mock_sn_merge_2_success = copy.deepcopy( + self.test_data.get("mock_sn_merge_2_success") + ) + self.mock_sn_merge_3_success = copy.deepcopy( + self.test_data.get("mock_sn_merge_3_success") + ) + self.mock_sn_merge_4_success = copy.deepcopy( + self.test_data.get("mock_sn_merge_4_success") + ) + self.mock_sn_merge_5_success = copy.deepcopy( + self.test_data.get("mock_sn_merge_5_success") + ) + self.mock_sn_merge_6_success = 
copy.deepcopy( + self.test_data.get("mock_sn_merge_6_success") + ) + self.mock_sn_replace_1_success = copy.deepcopy( + self.test_data.get("mock_sn_replace_1_success") + ) + self.mock_sn_replace_2_success = copy.deepcopy( + self.test_data.get("mock_sn_replace_2_success") + ) + self.mock_sn_have_success = copy.deepcopy( + self.test_data.get("mock_sn_have_success") + ) + self.mock_sn_query_success = copy.deepcopy( + self.test_data.get("mock_sn_query_success") + ) def setUp(self): super(TestDcnmServiceNodeModule, self).setUp() - self.mock_dcnm_ip_sn = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_node.get_fabric_inventory_details') + self.mock_dcnm_ip_sn = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_node.get_fabric_inventory_details" + ) self.run_dcnm_ip_sn = self.mock_dcnm_ip_sn.start() - self.mock_dcnm_send = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_node.dcnm_send') + self.mock_dcnm_send = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_node.dcnm_send" + ) self.run_dcnm_send = self.mock_dcnm_send.start() - self.mock_dcnm_version_supported = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_node.dcnm_version_supported') + self.mock_dcnm_version_supported = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_node.dcnm_version_supported" + ) self.run_dcnm_version_supported = self.mock_dcnm_version_supported.start() def tearDown(self): @@ -97,122 +124,172 @@ def tearDown(self): self.mock_dcnm_ip_sn.stop() self.mock_dcnm_version_supported.stop() - def load_fixtures(self, response=None, device=''): + def load_fixtures(self, response=None, device=""): self.run_dcnm_version_supported.return_value = 11 - if 'sn_blank_fabric' in self._testMethodName: + if "sn_blank_fabric" in self._testMethodName: self.run_dcnm_ip_sn.side_effect = [{}] else: self.run_dcnm_ip_sn.side_effect = [self.sn_inv_data] - if 'get_have_failure' in self._testMethodName: + if 
"get_have_failure" in self._testMethodName: self.run_dcnm_send.side_effect = [self.get_have_failure] - elif '_check_mode' in self._testMethodName: + elif "_check_mode" in self._testMethodName: self.init_data() self.run_dcnm_send.side_effect = [self.blank_get_data] - elif '_merged_one' in self._testMethodName: + elif "_merged_one" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.blank_data, self.mock_sn_merge_1_success] + self.run_dcnm_send.side_effect = [ + self.blank_data, + self.mock_sn_merge_1_success, + ] - elif '_merged_two' in self._testMethodName: + elif "_merged_two" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.blank_data, self.mock_sn_merge_2_success] + self.run_dcnm_send.side_effect = [ + self.blank_data, + self.mock_sn_merge_2_success, + ] - elif '_merged_three' in self._testMethodName: + elif "_merged_three" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.blank_data, self.mock_sn_merge_3_success] + self.run_dcnm_send.side_effect = [ + self.blank_data, + self.mock_sn_merge_3_success, + ] - elif '_merged_four' in self._testMethodName: + elif "_merged_four" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.blank_data, self.mock_sn_merge_4_success] + self.run_dcnm_send.side_effect = [ + self.blank_data, + self.mock_sn_merge_4_success, + ] - elif '_merged_five' in self._testMethodName: + elif "_merged_five" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.blank_data, self.mock_sn_merge_5_success] - - elif 'error1' in self._testMethodName: - self.run_dcnm_send.side_effect = [self.blank_data, self.error1, self.blank_data] - - elif '_merged_invalid_vpc' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.blank_data, + self.mock_sn_merge_5_success, + ] + + elif "error1" in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.blank_data, + self.error1, + 
self.blank_data, + ] + + elif "_merged_invalid_vpc" in self._testMethodName: self.init_data() self.run_dcnm_send.side_effect = [] - elif '_merged_no_params' in self._testMethodName: + elif "_merged_no_params" in self._testMethodName: self.init_data() self.run_dcnm_send.side_effect = [] - elif '_merged_no_type' in self._testMethodName: + elif "_merged_no_type" in self._testMethodName: self.init_data() self.run_dcnm_send.side_effect = [] - elif '_merged_no_ff' in self._testMethodName: + elif "_merged_no_ff" in self._testMethodName: self.init_data() self.run_dcnm_send.side_effect = [] - elif '_merged_more_switch' in self._testMethodName: + elif "_merged_more_switch" in self._testMethodName: self.init_data() self.run_dcnm_send.side_effect = [] - elif '_merged_no_vpc' in self._testMethodName: + elif "_merged_no_vpc" in self._testMethodName: self.init_data() self.run_dcnm_send.side_effect = [] - elif 'delete_std' in self._testMethodName: + elif "delete_std" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_sn_have_success, self.sn_delete_success_resp] + self.run_dcnm_send.side_effect = [ + self.mock_sn_have_success, + self.sn_delete_success_resp, + ] - elif 'delete_all' in self._testMethodName: + elif "delete_all" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_sn_have_success, self.sn_delete_success_resp] + self.run_dcnm_send.side_effect = [ + self.mock_sn_have_success, + self.sn_delete_success_resp, + ] - elif 'query_no' in self._testMethodName: + elif "query_no" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.blank_data, self.sn_query_success_resp] + self.run_dcnm_send.side_effect = [ + self.blank_data, + self.sn_query_success_resp, + ] - elif 'query_on' in self._testMethodName: + elif "query_on" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_sn_have_success,self.mock_sn_have_success] + self.run_dcnm_send.side_effect 
= [ + self.mock_sn_have_success, + self.mock_sn_have_success, + ] - elif 'query_without_config' in self._testMethodName: + elif "query_without_config" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_sn_have_success,self.mock_sn_have_success] + self.run_dcnm_send.side_effect = [ + self.mock_sn_have_success, + self.mock_sn_have_success, + ] - elif 'query_withonly_name' in self._testMethodName: + elif "query_withonly_name" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_sn_have_success,self.mock_sn_have_success] + self.run_dcnm_send.side_effect = [ + self.mock_sn_have_success, + self.mock_sn_have_success, + ] - elif 'query_invalid_param' in self._testMethodName: + elif "query_invalid_param" in self._testMethodName: self.init_data() self.run_dcnm_send.side_effect = [] - elif 'query_null' in self._testMethodName: + elif "query_null" in self._testMethodName: self.init_data() self.run_dcnm_send.side_effect = [self.blank_data_null] - elif 'override_with_additions' in self._testMethodName: + elif "override_with_additions" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.blank_data, self.mock_sn_merge_1_success] + self.run_dcnm_send.side_effect = [ + self.blank_data, + self.mock_sn_merge_1_success, + ] - elif 'override_with_deletions' in self._testMethodName: + elif "override_with_deletions" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_sn_have_success, self.sn_delete_success_resp, self.mock_sn_merge_6_success] + self.run_dcnm_send.side_effect = [ + self.mock_sn_have_success, + self.sn_delete_success_resp, + self.mock_sn_merge_6_success, + ] - elif 'override_without_changes' in self._testMethodName: + elif "override_without_changes" in self._testMethodName: self.init_data() self.run_dcnm_send.side_effect = [self.mock_sn_have_success] - elif 'replace_with_changes' in self._testMethodName: + elif "replace_with_changes" in 
self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_sn_have_success, self.mock_sn_replace_1_success] + self.run_dcnm_send.side_effect = [ + self.mock_sn_have_success, + self.mock_sn_replace_1_success, + ] - elif 'replace_with_type_changes' in self._testMethodName: + elif "replace_with_type_changes" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_sn_have_success, self.mock_sn_replace_2_success] + self.run_dcnm_send.side_effect = [ + self.mock_sn_have_success, + self.mock_sn_replace_2_success, + ] - elif 'replace_without_changes' in self._testMethodName: + elif "replace_without_changes" in self._testMethodName: self.init_data() self.run_dcnm_send.side_effect = [self.mock_sn_have_success] @@ -220,286 +297,513 @@ def load_fixtures(self, response=None, device=''): pass def test_dcnm_sn_blank_fabric(self): - set_module_args(dict(state='merged', - fabric='test_fabric', - service_fabric='external', config=self.playbook_config)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result.get('msg'), 'Fabric test_fabric missing on DCNM or does not have any switches') + self.assertEqual( + result.get("msg"), + "Fabric test_fabric missing on DCNM or does not have any switches", + ) def test_dcnm_sn_get_have_failure(self): - set_module_args(dict(state='merged', - fabric='test_fabric', - service_fabric='external', config=self.playbook_config)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result.get('msg'), 'Fabric test_fabric not present on DCNM') + self.assertEqual(result.get("msg"), "Fabric test_fabric not present on DCNM") def test_dcnm_sn_check_mode(self): - 
set_module_args(dict(_ansible_check_mode=True, state='merged', - fabric='test_fabric', - service_fabric='external', config=self.playbook_config)) + set_module_args( + dict( + _ansible_check_mode=True, + state="merged", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) - self.assertFalse(result.get('response')) + self.assertFalse(result.get("diff")) + self.assertFalse(result.get("response")) def test_dcnm_sn_merged_one(self): - set_module_args(dict(state='merged', - fabric='test_fabric', - service_fabric='external', config=self.playbook_config)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(result['response'][0]['DATA']['attachedFabricName'], 'test_fabric') - self.assertEqual(result['response'][0]['DATA']['attachedSwitchInterfaceName'], 'Ethernet1/1') - self.assertEqual(result['response'][0]['DATA']['fabricName'], 'external') - self.assertEqual(result['response'][0]['DATA']['formFactor'], 'Physical') - self.assertEqual(result['response'][0]['DATA']['interfaceName'], 'scv1') - self.assertEqual(result['response'][0]['DATA']['name'], 'SN-11') - self.assertEqual(result['response'][0]['DATA']['type'], 'Firewall') - self.assertEqual(result['response'][0]['RETURN_CODE'], 200) + self.assertEqual( + result["response"][0]["DATA"]["attachedFabricName"], "test_fabric" + ) + self.assertEqual( + result["response"][0]["DATA"]["attachedSwitchInterfaceName"], "Ethernet1/1" + ) + self.assertEqual(result["response"][0]["DATA"]["fabricName"], "external") + self.assertEqual(result["response"][0]["DATA"]["formFactor"], "Physical") + self.assertEqual(result["response"][0]["DATA"]["interfaceName"], "scv1") + self.assertEqual(result["response"][0]["DATA"]["name"], "SN-11") + 
self.assertEqual(result["response"][0]["DATA"]["type"], "Firewall") + self.assertEqual(result["response"][0]["RETURN_CODE"], 200) def test_dcnm_sn_merged_two(self): - set_module_args(dict(state='merged', - fabric='test_fabric', - service_fabric='external', config=self.playbook_config_virtual)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config_virtual, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(result['response'][0]['DATA']['attachedFabricName'], 'test_fabric') - self.assertEqual(result['response'][0]['DATA']['attachedSwitchInterfaceName'], 'Ethernet1/1') - self.assertEqual(result['response'][0]['DATA']['fabricName'], 'external') - self.assertEqual(result['response'][0]['DATA']['formFactor'], 'Virtual') - self.assertEqual(result['response'][0]['DATA']['interfaceName'], 'scv1') - self.assertEqual(result['response'][0]['DATA']['name'], 'SN-11') - self.assertEqual(result['response'][0]['DATA']['type'], 'Firewall') - self.assertEqual(result['response'][0]['RETURN_CODE'], 200) + self.assertEqual( + result["response"][0]["DATA"]["attachedFabricName"], "test_fabric" + ) + self.assertEqual( + result["response"][0]["DATA"]["attachedSwitchInterfaceName"], "Ethernet1/1" + ) + self.assertEqual(result["response"][0]["DATA"]["fabricName"], "external") + self.assertEqual(result["response"][0]["DATA"]["formFactor"], "Virtual") + self.assertEqual(result["response"][0]["DATA"]["interfaceName"], "scv1") + self.assertEqual(result["response"][0]["DATA"]["name"], "SN-11") + self.assertEqual(result["response"][0]["DATA"]["type"], "Firewall") + self.assertEqual(result["response"][0]["RETURN_CODE"], 200) def test_dcnm_sn_merged_three(self): - set_module_args(dict(state='merged', - fabric='test_fabric', - service_fabric='external', config=self.playbook_config_load)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + service_fabric="external", + 
config=self.playbook_config_load, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(result['response'][0]['DATA']['attachedFabricName'], 'test_fabric') - self.assertEqual(result['response'][0]['DATA']['attachedSwitchInterfaceName'], 'Ethernet1/1') - self.assertEqual(result['response'][0]['DATA']['fabricName'], 'external') - self.assertEqual(result['response'][0]['DATA']['formFactor'], 'Virtual') - self.assertEqual(result['response'][0]['DATA']['interfaceName'], 'scv1') - self.assertEqual(result['response'][0]['DATA']['name'], 'SN-11') - self.assertEqual(result['response'][0]['DATA']['type'], 'AVB') - self.assertEqual(result['response'][0]['RETURN_CODE'], 200) + self.assertEqual( + result["response"][0]["DATA"]["attachedFabricName"], "test_fabric" + ) + self.assertEqual( + result["response"][0]["DATA"]["attachedSwitchInterfaceName"], "Ethernet1/1" + ) + self.assertEqual(result["response"][0]["DATA"]["fabricName"], "external") + self.assertEqual(result["response"][0]["DATA"]["formFactor"], "Virtual") + self.assertEqual(result["response"][0]["DATA"]["interfaceName"], "scv1") + self.assertEqual(result["response"][0]["DATA"]["name"], "SN-11") + self.assertEqual(result["response"][0]["DATA"]["type"], "AVB") + self.assertEqual(result["response"][0]["RETURN_CODE"], 200) def test_dcnm_sn_merged_four(self): - set_module_args(dict(state='merged', - fabric='test_fabric', - service_fabric='external', config=self.playbook_config_vnf)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config_vnf, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(result['response'][0]['DATA']['attachedFabricName'], 'test_fabric') - self.assertEqual(result['response'][0]['DATA']['attachedSwitchInterfaceName'], 'Ethernet1/1') - self.assertEqual(result['response'][0]['DATA']['fabricName'], 'external') - self.assertEqual(result['response'][0]['DATA']['formFactor'], 
'Virtual') - self.assertEqual(result['response'][0]['DATA']['interfaceName'], 'scv1') - self.assertEqual(result['response'][0]['DATA']['name'], 'SN-11') - self.assertEqual(result['response'][0]['DATA']['type'], 'VNF') - self.assertEqual(result['response'][0]['RETURN_CODE'], 200) + self.assertEqual( + result["response"][0]["DATA"]["attachedFabricName"], "test_fabric" + ) + self.assertEqual( + result["response"][0]["DATA"]["attachedSwitchInterfaceName"], "Ethernet1/1" + ) + self.assertEqual(result["response"][0]["DATA"]["fabricName"], "external") + self.assertEqual(result["response"][0]["DATA"]["formFactor"], "Virtual") + self.assertEqual(result["response"][0]["DATA"]["interfaceName"], "scv1") + self.assertEqual(result["response"][0]["DATA"]["name"], "SN-11") + self.assertEqual(result["response"][0]["DATA"]["type"], "VNF") + self.assertEqual(result["response"][0]["RETURN_CODE"], 200) def test_dcnm_sn_merged_five(self): - set_module_args(dict(state='merged', - fabric='test_fabric', - service_fabric='external', config=self.playbook_config_vpc)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config_vpc, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(result['response'][0]['DATA']['attachedFabricName'], 'test_fabric') - self.assertEqual(result['response'][0]['DATA']['attachedSwitchInterfaceName'], 'vPC1') - self.assertEqual(result['response'][0]['DATA']['fabricName'], 'external') - self.assertEqual(result['response'][0]['DATA']['formFactor'], 'Physical') - self.assertEqual(result['response'][0]['DATA']['interfaceName'], 'scv1') - self.assertEqual(result['response'][0]['DATA']['name'], 'SN-11') - self.assertEqual(result['response'][0]['DATA']['type'], 'Firewall') - self.assertEqual(result['response'][0]['RETURN_CODE'], 200) + self.assertEqual( + result["response"][0]["DATA"]["attachedFabricName"], "test_fabric" + ) + self.assertEqual( + 
result["response"][0]["DATA"]["attachedSwitchInterfaceName"], "vPC1" + ) + self.assertEqual(result["response"][0]["DATA"]["fabricName"], "external") + self.assertEqual(result["response"][0]["DATA"]["formFactor"], "Physical") + self.assertEqual(result["response"][0]["DATA"]["interfaceName"], "scv1") + self.assertEqual(result["response"][0]["DATA"]["name"], "SN-11") + self.assertEqual(result["response"][0]["DATA"]["type"], "Firewall") + self.assertEqual(result["response"][0]["RETURN_CODE"], 200) def test_dcnm_sn_error1(self): - set_module_args(dict(state='merged', fabric='test_fabric', - service_fabric='external', config=self.playbook_config)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result['msg']['RETURN_CODE'], 400) - self.assertEqual(result['msg']['ERROR'], 'There is an error') + self.assertEqual(result["msg"]["RETURN_CODE"], 400) + self.assertEqual(result["msg"]["ERROR"], "There is an error") def test_dcnm_sn_merged_invalid_vpc(self): - set_module_args(dict(state='merged', - fabric='test_fabric', - service_fabric='external', config=self.playbook_config_invalid_vpc)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config_invalid_vpc, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result.get('msg'), 'Fabric: test_fabric - if two switches are provided, vpc is only interface option') + self.assertEqual( + result.get("msg"), + "Fabric: test_fabric - if two switches are provided, vpc is only interface option", + ) def test_dcnm_sn_merged_more_switch(self): - set_module_args(dict(state='merged', - fabric='test_fabric', - service_fabric='external', config=self.playbook_config_more_switch)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + service_fabric="external", + 
config=self.playbook_config_more_switch, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result.get('msg'), 'Fabric: test_fabric - Upto 2 switches only allowed') + self.assertEqual( + result.get("msg"), "Fabric: test_fabric - Upto 2 switches only allowed" + ) def test_dcnm_sn_merged_no_params(self): - set_module_args(dict(state='merged', - fabric='test_fabric', - service_fabric='external', config=self.playbook_config_no_params)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config_no_params, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result.get('msg'), 'config: element is mandatory for this state merged') + self.assertEqual( + result.get("msg"), "config: element is mandatory for this state merged" + ) def test_dcnm_sn_merged_no_type(self): - set_module_args(dict(state='merged', - fabric='test_fabric', - service_fabric='external', config=self.playbook_config_no_type)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config_no_type, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result.get('msg'), 'Invalid parameters in playbook: karth : Invalid choice provided') + self.assertEqual( + result.get("msg"), + "Invalid parameters in playbook: karth : Invalid choice provided", + ) def test_dcnm_sn_merged_no_ff(self): - set_module_args(dict(state='merged', - fabric='test_fabric', - service_fabric='external', config=self.playbook_config_no_ff)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config_no_ff, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result.get('msg'), 'Invalid parameters in playbook: babu : Invalid choice provided') + self.assertEqual( + result.get("msg"), + "Invalid parameters in playbook: 
babu : Invalid choice provided", + ) def test_dcnm_sn_merged_no_vpc(self): - set_module_args(dict(state='merged', - fabric='test_fabric', - service_fabric='external', config=self.playbook_config_no_vpc)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config_no_vpc, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result.get('msg'), 'Fabric: test_fabric - For 1 switch, vpc is not the interface option') + self.assertEqual( + result.get("msg"), + "Fabric: test_fabric - For 1 switch, vpc is not the interface option", + ) def test_dcnm_sn_delete_std(self): - set_module_args(dict(state='deleted', fabric='test_fabric', - service_fabric='external', config=self.playbook_config)) + set_module_args( + dict( + state="deleted", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(result['response'][0]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) - self.assertEqual(result['response'][0]['METHOD'], 'DELETE') + self.assertEqual(result["response"][0]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) + self.assertEqual(result["response"][0]["METHOD"], "DELETE") def test_dcnm_sn_delete_all(self): - set_module_args(dict(state='deleted', fabric='test_fabric', - service_fabric='external', config=[])) + set_module_args( + dict( + state="deleted", + fabric="test_fabric", + service_fabric="external", + config=[], + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(result['response'][0]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) - self.assertEqual(result['response'][0]['METHOD'], 'DELETE') + self.assertEqual(result["response"][0]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) + self.assertEqual(result["response"][0]["METHOD"], "DELETE") def test_dcnm_sn_query_no(self): - set_module_args(dict(state='query', fabric='test_fabric', - service_fabric='external', 
config=self.playbook_config)) + set_module_args( + dict( + state="query", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) - self.assertEqual(result.get('response'), []) + self.assertFalse(result.get("diff")) + self.assertEqual(result.get("response"), []) def test_dcnm_sn_query_on(self): - set_module_args(dict(state='query', fabric='test_fabric', - service_fabric='external', config=self.playbook_config)) + set_module_args( + dict( + state="query", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) - self.assertEqual(result['response'][0]['attachedFabricName'], 'test_fabric') - self.assertEqual(result['response'][0]['attachedSwitchInterfaceName'], 'Ethernet1/1') - self.assertEqual(result['response'][0]['fabricName'], 'external') - self.assertEqual(result['response'][0]['formFactor'], 'Physical') - self.assertEqual(result['response'][0]['interfaceName'], 'scv1') - self.assertEqual(result['response'][0]['name'], 'SN-11') - self.assertEqual(result['response'][0]['type'], 'Firewall') + self.assertFalse(result.get("diff")) + self.assertEqual(result["response"][0]["attachedFabricName"], "test_fabric") + self.assertEqual( + result["response"][0]["attachedSwitchInterfaceName"], "Ethernet1/1" + ) + self.assertEqual(result["response"][0]["fabricName"], "external") + self.assertEqual(result["response"][0]["formFactor"], "Physical") + self.assertEqual(result["response"][0]["interfaceName"], "scv1") + self.assertEqual(result["response"][0]["name"], "SN-11") + self.assertEqual(result["response"][0]["type"], "Firewall") def test_dcnm_sn_query_without_config(self): - set_module_args(dict(state='query', fabric='test_fabric', - service_fabric='external', config=[])) + set_module_args( + dict( + state="query", + 
fabric="test_fabric", + service_fabric="external", + config=[], + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) - self.assertEqual(result['response'][0]['attachedFabricName'], 'test_fabric') - self.assertEqual(result['response'][0]['attachedSwitchInterfaceName'], 'Ethernet1/1') - self.assertEqual(result['response'][0]['fabricName'], 'external') - self.assertEqual(result['response'][0]['formFactor'], 'Physical') - self.assertEqual(result['response'][0]['interfaceName'], 'scv1') - self.assertEqual(result['response'][0]['name'], 'SN-11') - self.assertEqual(result['response'][0]['type'], 'Firewall') + self.assertFalse(result.get("diff")) + self.assertEqual(result["response"][0]["attachedFabricName"], "test_fabric") + self.assertEqual( + result["response"][0]["attachedSwitchInterfaceName"], "Ethernet1/1" + ) + self.assertEqual(result["response"][0]["fabricName"], "external") + self.assertEqual(result["response"][0]["formFactor"], "Physical") + self.assertEqual(result["response"][0]["interfaceName"], "scv1") + self.assertEqual(result["response"][0]["name"], "SN-11") + self.assertEqual(result["response"][0]["type"], "Firewall") def test_dcnm_sn_query_withonly_name(self): - set_module_args(dict(state='query', fabric='test_fabric', - service_fabric='external', config=self.playbook_config_name)) + set_module_args( + dict( + state="query", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config_name, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) - self.assertEqual(result['response'][0]['attachedFabricName'], 'test_fabric') - self.assertEqual(result['response'][0]['attachedSwitchInterfaceName'], 'Ethernet1/1') - self.assertEqual(result['response'][0]['fabricName'], 'external') - self.assertEqual(result['response'][0]['formFactor'], 'Physical') - self.assertEqual(result['response'][0]['interfaceName'], 'scv1') - 
self.assertEqual(result['response'][0]['name'], 'SN-11') - self.assertEqual(result['response'][0]['type'], 'Firewall') + self.assertFalse(result.get("diff")) + self.assertEqual(result["response"][0]["attachedFabricName"], "test_fabric") + self.assertEqual( + result["response"][0]["attachedSwitchInterfaceName"], "Ethernet1/1" + ) + self.assertEqual(result["response"][0]["fabricName"], "external") + self.assertEqual(result["response"][0]["formFactor"], "Physical") + self.assertEqual(result["response"][0]["interfaceName"], "scv1") + self.assertEqual(result["response"][0]["name"], "SN-11") + self.assertEqual(result["response"][0]["type"], "Firewall") def test_dcnm_sn_query_invalid_param(self): - set_module_args(dict(state='query', fabric='test_fabric', - service_fabric='external', config=self.playbook_config_query)) + set_module_args( + dict( + state="query", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config_query, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result.get('msg'), 'Invalid parameters in playbook: name : Required parameter not found') + self.assertEqual( + result.get("msg"), + "Invalid parameters in playbook: name : Required parameter not found", + ) def test_dcnm_sn_query_null(self): - set_module_args(dict(state='query', fabric='test_fabric', - service_fabric='external', config=self.playbook_config)) + set_module_args( + dict( + state="query", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result.get('msg'), 'Unable to Service Node under fabric: test_fabric') + self.assertEqual( + result.get("msg"), "Unable to Service Node under fabric: test_fabric" + ) def test_dcnm_sn_override_with_additions(self): - set_module_args(dict(state='overridden', fabric='test_fabric', - service_fabric='external', config=self.playbook_config)) + set_module_args( + dict( + 
state="overridden", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(result['response'][0]['DATA']['attachedFabricName'], 'test_fabric') - self.assertEqual(result['response'][0]['DATA']['attachedSwitchInterfaceName'], 'Ethernet1/1') - self.assertEqual(result['response'][0]['DATA']['fabricName'], 'external') - self.assertEqual(result['response'][0]['DATA']['formFactor'], 'Physical') - self.assertEqual(result['response'][0]['DATA']['interfaceName'], 'scv1') - self.assertEqual(result['response'][0]['DATA']['name'], 'SN-11') - self.assertEqual(result['response'][0]['DATA']['type'], 'Firewall') - self.assertEqual(result['response'][0]['RETURN_CODE'], 200) + self.assertEqual( + result["response"][0]["DATA"]["attachedFabricName"], "test_fabric" + ) + self.assertEqual( + result["response"][0]["DATA"]["attachedSwitchInterfaceName"], "Ethernet1/1" + ) + self.assertEqual(result["response"][0]["DATA"]["fabricName"], "external") + self.assertEqual(result["response"][0]["DATA"]["formFactor"], "Physical") + self.assertEqual(result["response"][0]["DATA"]["interfaceName"], "scv1") + self.assertEqual(result["response"][0]["DATA"]["name"], "SN-11") + self.assertEqual(result["response"][0]["DATA"]["type"], "Firewall") + self.assertEqual(result["response"][0]["RETURN_CODE"], 200) def test_dcnm_sn_override_with_deletions(self): - set_module_args(dict(state='overridden', fabric='test_fabric', - service_fabric='external', config=self.playbook_new_config)) + set_module_args( + dict( + state="overridden", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_new_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(result['response'][0]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) - self.assertEqual(result['response'][0]['METHOD'], 'DELETE') - self.assertEqual(result['response'][1]['DATA']['attachedFabricName'], 
'test_fabric') - self.assertEqual(result['response'][1]['DATA']['attachedSwitchInterfaceName'], 'Ethernet1/2') - self.assertEqual(result['response'][1]['DATA']['fabricName'], 'external') - self.assertEqual(result['response'][1]['DATA']['formFactor'], 'Virtual') - self.assertEqual(result['response'][1]['DATA']['interfaceName'], 'scv12') - self.assertEqual(result['response'][1]['DATA']['name'], 'SN-12') - self.assertEqual(result['response'][1]['DATA']['type'], 'ADC') - self.assertEqual(result['response'][1]['RETURN_CODE'], 200) + self.assertEqual(result["response"][0]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) + self.assertEqual(result["response"][0]["METHOD"], "DELETE") + self.assertEqual( + result["response"][1]["DATA"]["attachedFabricName"], "test_fabric" + ) + self.assertEqual( + result["response"][1]["DATA"]["attachedSwitchInterfaceName"], "Ethernet1/2" + ) + self.assertEqual(result["response"][1]["DATA"]["fabricName"], "external") + self.assertEqual(result["response"][1]["DATA"]["formFactor"], "Virtual") + self.assertEqual(result["response"][1]["DATA"]["interfaceName"], "scv12") + self.assertEqual(result["response"][1]["DATA"]["name"], "SN-12") + self.assertEqual(result["response"][1]["DATA"]["type"], "ADC") + self.assertEqual(result["response"][1]["RETURN_CODE"], 200) def test_dcnm_sn_override_without_changes(self): - set_module_args(dict(state='overridden', fabric='test_fabric', - service_fabric='external', config=self.playbook_over_config)) + set_module_args( + dict( + state="overridden", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_over_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) - self.assertFalse(result.get('response')) + self.assertFalse(result.get("diff")) + self.assertFalse(result.get("response")) def test_dcnm_sn_replace_with_changes(self): - set_module_args(dict(state='replaced', fabric='test_fabric', - service_fabric='external', 
config=self.playbook_config_replace_new)) + set_module_args( + dict( + state="replaced", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_config_replace_new, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(result['response'][0]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) - self.assertEqual(result['response'][0]['METHOD'], 'PUT') - self.assertEqual(result['response'][0]['DATA']['attachedFabricName'], 'test_fabric') - self.assertEqual(result['response'][0]['DATA']['attachedSwitchInterfaceName'], 'Ethernet1/1') - self.assertEqual(result['response'][0]['DATA']['fabricName'], 'external') - self.assertEqual(result['response'][0]['DATA']['formFactor'], 'Virtual') - self.assertEqual(result['response'][0]['DATA']['interfaceName'], 'scv11') - self.assertEqual(result['response'][0]['DATA']['name'], 'SN-11') - self.assertEqual(result['response'][0]['DATA']['type'], 'Firewall') - self.assertEqual(result['response'][0]['RETURN_CODE'], 200) + self.assertEqual(result["response"][0]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) + self.assertEqual(result["response"][0]["METHOD"], "PUT") + self.assertEqual( + result["response"][0]["DATA"]["attachedFabricName"], "test_fabric" + ) + self.assertEqual( + result["response"][0]["DATA"]["attachedSwitchInterfaceName"], "Ethernet1/1" + ) + self.assertEqual(result["response"][0]["DATA"]["fabricName"], "external") + self.assertEqual(result["response"][0]["DATA"]["formFactor"], "Virtual") + self.assertEqual(result["response"][0]["DATA"]["interfaceName"], "scv11") + self.assertEqual(result["response"][0]["DATA"]["name"], "SN-11") + self.assertEqual(result["response"][0]["DATA"]["type"], "Firewall") + self.assertEqual(result["response"][0]["RETURN_CODE"], 200) def test_dcnm_sn_replace_with_type_changes(self): - set_module_args(dict(state='replaced', fabric='test_fabric', - service_fabric='external', config=self.playbook_config_replace_new1)) + set_module_args( + dict( + state="replaced", + 
fabric="test_fabric", + service_fabric="external", + config=self.playbook_config_replace_new1, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(result['response'][0]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) - self.assertEqual(result['response'][0]['METHOD'], 'PUT') - self.assertEqual(result['response'][0]['DATA']['attachedFabricName'], 'test_fabric') - self.assertEqual(result['response'][0]['DATA']['attachedSwitchInterfaceName'], 'Ethernet1/1') - self.assertEqual(result['response'][0]['DATA']['fabricName'], 'external') - self.assertEqual(result['response'][0]['DATA']['formFactor'], 'Virtual') - self.assertEqual(result['response'][0]['DATA']['interfaceName'], 'scv11') - self.assertEqual(result['response'][0]['DATA']['name'], 'SN-11') - self.assertEqual(result['response'][0]['DATA']['type'], 'ADC') - self.assertEqual(result['response'][0]['RETURN_CODE'], 200) + self.assertEqual(result["response"][0]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) + self.assertEqual(result["response"][0]["METHOD"], "PUT") + self.assertEqual( + result["response"][0]["DATA"]["attachedFabricName"], "test_fabric" + ) + self.assertEqual( + result["response"][0]["DATA"]["attachedSwitchInterfaceName"], "Ethernet1/1" + ) + self.assertEqual(result["response"][0]["DATA"]["fabricName"], "external") + self.assertEqual(result["response"][0]["DATA"]["formFactor"], "Virtual") + self.assertEqual(result["response"][0]["DATA"]["interfaceName"], "scv11") + self.assertEqual(result["response"][0]["DATA"]["name"], "SN-11") + self.assertEqual(result["response"][0]["DATA"]["type"], "ADC") + self.assertEqual(result["response"][0]["RETURN_CODE"], 200) def test_dcnm_sn_replace_without_changes(self): - set_module_args(dict(state='replaced', fabric='test_fabric', - service_fabric='external', config=self.playbook_over_config)) + set_module_args( + dict( + state="replaced", + fabric="test_fabric", + service_fabric="external", + config=self.playbook_over_config, + ) + ) result = 
self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) - self.assertFalse(result.get('response')) + self.assertFalse(result.get("diff")) + self.assertFalse(result.get("response")) diff --git a/tests/unit/modules/dcnm/test_dcnm_service_policy.py b/tests/unit/modules/dcnm/test_dcnm_service_policy.py index 9b9432074..d1c0ae061 100644 --- a/tests/unit/modules/dcnm/test_dcnm_service_policy.py +++ b/tests/unit/modules/dcnm/test_dcnm_service_policy.py @@ -1,6 +1,4 @@ -#!/usr/bin/python -# -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,7 +13,8 @@ # limitations under the License. # Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) +from __future__ import absolute_import, division, print_function + __metaclass__ = type from ansible_collections.ansible.netcommon.tests.unit.compat.mock import patch @@ -23,7 +22,9 @@ from ansible_collections.cisco.dcnm.plugins.modules import dcnm_service_policy from .dcnm_module import TestDcnmModule, set_module_args, loadPlaybookData -import json, copy +import json +import copy + class TestDcnmServicePolicyModule(TestDcnmModule): @@ -33,24 +34,30 @@ class TestDcnmServicePolicyModule(TestDcnmModule): def init_data(self): self.fd = None - def log_msg (self, msg): + def log_msg(self, msg): - if (self.fd is None): + if self.fd is None: self.fd = open("sp-ut.log", "w+") - self.fd.write (msg) + self.fd.write(msg) def setUp(self): super(TestDcnmServicePolicyModule, self).setUp() - self.mock_dcnm_send = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_policy.dcnm_send') - self.run_dcnm_send = self.mock_dcnm_send.start() + self.mock_dcnm_send = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_policy.dcnm_send" + ) + self.run_dcnm_send = 
self.mock_dcnm_send.start() - self.mock_dcnm_version_supported = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_policy.dcnm_version_supported') + self.mock_dcnm_version_supported = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_policy.dcnm_version_supported" + ) self.run_dcnm_version_supported = self.mock_dcnm_version_supported.start() - self.mock_dcnm_reset_connection = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_policy.dcnm_reset_connection') - self.run_dcnm_reset_connection = self.mock_dcnm_reset_connection.start() + self.mock_dcnm_reset_connection = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_policy.dcnm_reset_connection" + ) + self.run_dcnm_reset_connection = self.mock_dcnm_reset_connection.start() def tearDown(self): @@ -59,1678 +66,2011 @@ def tearDown(self): self.mock_dcnm_version_supported.stop() self.mock_dcnm_reset_connection.stop() -#################################### FIXTURES ############################ + # -------------------------- FIXTURES -------------------------- - def load_sp_fixtures (self): + def load_sp_fixtures(self): - if ('test_dcnm_sp_merged_new' == self._testMethodName): + if "test_dcnm_sp_merged_new" == self._testMethodName: - have_sp1_resp = [] - have_sp2_resp = [] - have_sp3_resp = [] - get_snt_resp1 = self.payloads_data.get('get_snt1_response') - get_snt_resp2 = self.payloads_data.get('get_snt2_response') - create_sp1_resp = self.payloads_data.get('create_sp1_resp') - create_sp2_resp = self.payloads_data.get('create_sp2_resp') - create_sp3_resp = self.payloads_data.get('create_sp3_resp') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - get_sn1_att_status = self.payloads_data.get('get_sn1_att_status') - get_sn2_att_status = self.payloads_data.get('get_sn2_att_status') + have_sp1_resp = [] + have_sp2_resp = [] + have_sp3_resp = [] + get_snt_resp1 = 
self.payloads_data.get("get_snt1_response") + get_snt_resp2 = self.payloads_data.get("get_snt2_response") + create_sp1_resp = self.payloads_data.get("create_sp1_resp") + create_sp2_resp = self.payloads_data.get("create_sp2_resp") + create_sp3_resp = self.payloads_data.get("create_sp3_resp") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + get_sn2_att_status = self.payloads_data.get("get_sn2_att_status") self.run_dcnm_send.side_effect = [ - get_snt_resp1, get_snt_resp2, get_snt_resp2, - have_sp1_resp, have_sp2_resp, have_sp3_resp, - create_sp1_resp, create_sp2_resp, create_sp3_resp, - deploy_sp1_resp, deploy_sp2_sp3_resp, - get_sn1_att_status, - get_sn2_att_status, - get_sn2_att_status - ] - - if ('test_dcnm_sp_merged_new_no_opt_elems' == self._testMethodName): - - have_sp1_resp = [] - have_sp2_resp = [] - get_snt_resp1 = self.payloads_data.get('get_snt1_response') - get_snt_resp2 = self.payloads_data.get('get_snt2_response') - create_sp1_resp = self.payloads_data.get('create_sp1_resp') - create_sp2_resp = self.payloads_data.get('create_sp2_resp') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - get_sn1_att_status = self.payloads_data.get('get_sn1_att_status') - get_sn2_att_status = self.payloads_data.get('get_sn2_att_status') + get_snt_resp1, + get_snt_resp2, + get_snt_resp2, + have_sp1_resp, + have_sp2_resp, + have_sp3_resp, + create_sp1_resp, + create_sp2_resp, + create_sp3_resp, + deploy_sp1_resp, + deploy_sp2_sp3_resp, + get_sn1_att_status, + get_sn2_att_status, + get_sn2_att_status, + ] + + if "test_dcnm_sp_merged_new_no_opt_elems" == self._testMethodName: + + have_sp1_resp = [] + have_sp2_resp = [] + get_snt_resp1 = self.payloads_data.get("get_snt1_response") + get_snt_resp2 = self.payloads_data.get("get_snt2_response") + 
create_sp1_resp = self.payloads_data.get("create_sp1_resp") + create_sp2_resp = self.payloads_data.get("create_sp2_resp") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + get_sn2_att_status = self.payloads_data.get("get_sn2_att_status") self.run_dcnm_send.side_effect = [ - get_snt_resp1, get_snt_resp2, - have_sp1_resp, have_sp2_resp, - create_sp1_resp, create_sp2_resp, - deploy_sp1_resp, deploy_sp2_sp3_resp, - get_sn1_att_status, - get_sn2_att_status - ] - - if ('test_dcnm_sp_merged_existing_no_opt_elems' == self._testMethodName): - - have_sp1_resp = self.payloads_data.get('get_sp1_resp') - have_sp2_resp = self.payloads_data.get('get_sp2_resp') - get_sn1_att_status = self.payloads_data.get('get_sn1_att_status') - get_sn2_att_status = self.payloads_data.get('get_sn2_att_status') - get_snt_resp1 = self.payloads_data.get('get_snt1_response') - get_snt_resp2 = self.payloads_data.get('get_snt2_response') - create_sp1_resp = self.payloads_data.get('create_sp1_resp') - create_sp2_resp = self.payloads_data.get('create_sp2_resp') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - get_sn1_att_status = self.payloads_data.get('get_sn1_att_status') - get_sn2_att_status = self.payloads_data.get('get_sn2_att_status') + get_snt_resp1, + get_snt_resp2, + have_sp1_resp, + have_sp2_resp, + create_sp1_resp, + create_sp2_resp, + deploy_sp1_resp, + deploy_sp2_sp3_resp, + get_sn1_att_status, + get_sn2_att_status, + ] + + if "test_dcnm_sp_merged_existing_no_opt_elems" == self._testMethodName: + + have_sp1_resp = self.payloads_data.get("get_sp1_resp") + have_sp2_resp = self.payloads_data.get("get_sp2_resp") + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + get_sn2_att_status = self.payloads_data.get("get_sn2_att_status") + get_snt_resp1 = 
self.payloads_data.get("get_snt1_response") + get_snt_resp2 = self.payloads_data.get("get_snt2_response") + create_sp1_resp = self.payloads_data.get("create_sp1_resp") + create_sp2_resp = self.payloads_data.get("create_sp2_resp") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + get_sn2_att_status = self.payloads_data.get("get_sn2_att_status") self.run_dcnm_send.side_effect = [ - get_snt_resp1, get_snt_resp2, - have_sp1_resp, have_sp2_resp, - get_sn1_att_status, get_sn2_att_status, - create_sp1_resp, create_sp2_resp, - deploy_sp1_resp, deploy_sp2_sp3_resp, - get_sn1_att_status, - get_sn2_att_status - ] - - if ('test_dcnm_sp_merged_new_check_mode' == self._testMethodName): + get_snt_resp1, + get_snt_resp2, + have_sp1_resp, + have_sp2_resp, + get_sn1_att_status, + get_sn2_att_status, + create_sp1_resp, + create_sp2_resp, + deploy_sp1_resp, + deploy_sp2_sp3_resp, + get_sn1_att_status, + get_sn2_att_status, + ] + + if "test_dcnm_sp_merged_new_check_mode" == self._testMethodName: pass - if ('test_dcnm_sp_merged_new_unauth_error' == self._testMethodName): + if "test_dcnm_sp_merged_new_unauth_error" == self._testMethodName: - have_sp1_resp = [] - have_sp2_resp = [] - get_snt_resp1 = self.payloads_data.get('get_snt1_response') - get_snt_resp2 = self.payloads_data.get('get_snt2_response') - create_sp1_resp = self.payloads_data.get('create_sp1_resp') - create_sp2_resp = self.payloads_data.get('create_sp2_resp') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - resp_unauth_err = self.payloads_data.get('resp_unauth_err') - get_sn1_att_status = self.payloads_data.get('get_sn1_att_status') - get_sn2_att_status = self.payloads_data.get('get_sn2_att_status') + have_sp1_resp = [] + have_sp2_resp = [] + get_snt_resp1 = 
self.payloads_data.get("get_snt1_response") + get_snt_resp2 = self.payloads_data.get("get_snt2_response") + create_sp1_resp = self.payloads_data.get("create_sp1_resp") + create_sp2_resp = self.payloads_data.get("create_sp2_resp") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + resp_unauth_err = self.payloads_data.get("resp_unauth_err") + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + get_sn2_att_status = self.payloads_data.get("get_sn2_att_status") self.run_dcnm_send.side_effect = [ - get_snt_resp1, get_snt_resp2, - have_sp1_resp, have_sp2_resp, - resp_unauth_err, [], - create_sp1_resp, create_sp2_resp, - deploy_sp1_resp, deploy_sp2_sp3_resp, - get_sn1_att_status, - get_sn2_att_status - ] - - if ('test_dcnm_sp_config_without_state' == self._testMethodName): - - have_sp1_resp = [] - have_sp2_resp = [] - have_sp3_resp = [] - get_snt_resp1 = self.payloads_data.get('get_snt1_response') - get_snt_resp2 = self.payloads_data.get('get_snt2_response') - get_snt_resp3 = self.payloads_data.get('get_snt2_response') - create_sp1_resp = self.payloads_data.get('create_sp1_resp') - create_sp2_resp = self.payloads_data.get('create_sp2_resp') - create_sp3_resp = self.payloads_data.get('create_sp3_resp') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - get_sn1_att_status = self.payloads_data.get('get_sn1_att_status') - get_sn2_att_status = self.payloads_data.get('get_sn2_att_status') + get_snt_resp1, + get_snt_resp2, + have_sp1_resp, + have_sp2_resp, + resp_unauth_err, + [], + create_sp1_resp, + create_sp2_resp, + deploy_sp1_resp, + deploy_sp2_sp3_resp, + get_sn1_att_status, + get_sn2_att_status, + ] + + if "test_dcnm_sp_config_without_state" == self._testMethodName: + + have_sp1_resp = [] + have_sp2_resp = [] + have_sp3_resp = [] + get_snt_resp1 = self.payloads_data.get("get_snt1_response") + 
get_snt_resp2 = self.payloads_data.get("get_snt2_response") + get_snt_resp3 = self.payloads_data.get("get_snt2_response") + create_sp1_resp = self.payloads_data.get("create_sp1_resp") + create_sp2_resp = self.payloads_data.get("create_sp2_resp") + create_sp3_resp = self.payloads_data.get("create_sp3_resp") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + get_sn2_att_status = self.payloads_data.get("get_sn2_att_status") self.run_dcnm_send.side_effect = [ - get_snt_resp1, get_snt_resp2, get_snt_resp3, - have_sp1_resp, have_sp2_resp, have_sp3_resp, - create_sp1_resp, create_sp2_resp, create_sp3_resp, - deploy_sp1_resp, deploy_sp2_sp3_resp, - get_sn1_att_status, get_sn2_att_status, - get_sn2_att_status - ] - - if ('test_dcnm_sp_merge_no_deploy' == self._testMethodName): - - have_sp1_resp = [] - have_sp2_resp = [] - have_sp3_resp = [] - get_snt_resp1 = self.payloads_data.get('get_snt1_response') - get_snt_resp2 = self.payloads_data.get('get_snt2_response') - get_snt_resp3 = self.payloads_data.get('get_snt2_response') - create_sp1_resp = self.payloads_data.get('create_sp1_resp') - create_sp2_resp = self.payloads_data.get('create_sp2_resp') - create_sp3_resp = self.payloads_data.get('create_sp3_resp') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - get_sn1_att_status = self.payloads_data.get('get_sn1_att_status') - get_sn2_att_status = self.payloads_data.get('get_sn2_att_status') + get_snt_resp1, + get_snt_resp2, + get_snt_resp3, + have_sp1_resp, + have_sp2_resp, + have_sp3_resp, + create_sp1_resp, + create_sp2_resp, + create_sp3_resp, + deploy_sp1_resp, + deploy_sp2_sp3_resp, + get_sn1_att_status, + get_sn2_att_status, + get_sn2_att_status, + ] + + if "test_dcnm_sp_merge_no_deploy" == self._testMethodName: + + have_sp1_resp = [] + 
have_sp2_resp = [] + have_sp3_resp = [] + get_snt_resp1 = self.payloads_data.get("get_snt1_response") + get_snt_resp2 = self.payloads_data.get("get_snt2_response") + get_snt_resp3 = self.payloads_data.get("get_snt2_response") + create_sp1_resp = self.payloads_data.get("create_sp1_resp") + create_sp2_resp = self.payloads_data.get("create_sp2_resp") + create_sp3_resp = self.payloads_data.get("create_sp3_resp") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + get_sn2_att_status = self.payloads_data.get("get_sn2_att_status") self.run_dcnm_send.side_effect = [ - get_snt_resp1, get_snt_resp2, get_snt_resp3, - have_sp1_resp, have_sp2_resp, have_sp3_resp, - create_sp1_resp, create_sp2_resp, create_sp3_resp, - deploy_sp1_resp, deploy_sp2_sp3_resp, - get_sn1_att_status, - get_sn2_att_status, - get_sn2_att_status - ] + get_snt_resp1, + get_snt_resp2, + get_snt_resp3, + have_sp1_resp, + have_sp2_resp, + have_sp3_resp, + create_sp1_resp, + create_sp2_resp, + create_sp3_resp, + deploy_sp1_resp, + deploy_sp2_sp3_resp, + get_sn1_att_status, + get_sn2_att_status, + get_sn2_att_status, + ] pass - if ('test_dcnm_sp_merge_deploy_false' == self._testMethodName): + if "test_dcnm_sp_merge_deploy_false" == self._testMethodName: - have_sp1_resp = [] - have_sp2_resp = [] - have_sp3_resp = [] - get_snt_resp1 = self.payloads_data.get('get_snt1_response') - get_snt_resp2 = self.payloads_data.get('get_snt2_response') - get_snt_resp3 = self.payloads_data.get('get_snt2_response') - create_sp1_resp = self.payloads_data.get('create_sp1_resp') - create_sp2_resp = self.payloads_data.get('create_sp2_resp') - create_sp3_resp = self.payloads_data.get('create_sp3_resp') + have_sp1_resp = [] + have_sp2_resp = [] + have_sp3_resp = [] + get_snt_resp1 = self.payloads_data.get("get_snt1_response") + get_snt_resp2 = self.payloads_data.get("get_snt2_response") 
+ get_snt_resp3 = self.payloads_data.get("get_snt2_response") + create_sp1_resp = self.payloads_data.get("create_sp1_resp") + create_sp2_resp = self.payloads_data.get("create_sp2_resp") + create_sp3_resp = self.payloads_data.get("create_sp3_resp") self.run_dcnm_send.side_effect = [ - get_snt_resp1, get_snt_resp2, get_snt_resp3, - have_sp1_resp, have_sp2_resp, have_sp3_resp, - create_sp1_resp, create_sp2_resp, create_sp3_resp, - ] - - if ('test_dcnm_sp_merged_existing_and_non_existing' == self._testMethodName): - - have_sp1_resp = self.payloads_data.get('get_sp1_resp') - have_sp2_resp = [] - have_sp3_resp = [] - get_sn1_att_status = self.payloads_data.get('get_sn1_att_status') - get_sn2_att_status = self.payloads_data.get('get_sn2_att_status') - get_snt_resp1 = self.payloads_data.get('get_snt1_response') - get_snt_resp2 = self.payloads_data.get('get_snt2_response') - get_snt_resp3 = self.payloads_data.get('get_snt2_response') - create_sp2_resp = self.payloads_data.get('create_sp2_resp') - create_sp3_resp = self.payloads_data.get('create_sp3_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - get_sn1_att_status = self.payloads_data.get('get_sn1_att_status') - get_sn2_att_status = self.payloads_data.get('get_sn2_att_status') + get_snt_resp1, + get_snt_resp2, + get_snt_resp3, + have_sp1_resp, + have_sp2_resp, + have_sp3_resp, + create_sp1_resp, + create_sp2_resp, + create_sp3_resp, + ] + + if "test_dcnm_sp_merged_existing_and_non_existing" == self._testMethodName: + + have_sp1_resp = self.payloads_data.get("get_sp1_resp") + have_sp2_resp = [] + have_sp3_resp = [] + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + get_sn2_att_status = self.payloads_data.get("get_sn2_att_status") + get_snt_resp1 = self.payloads_data.get("get_snt1_response") + get_snt_resp2 = self.payloads_data.get("get_snt2_response") + get_snt_resp3 = self.payloads_data.get("get_snt2_response") + create_sp2_resp = self.payloads_data.get("create_sp2_resp") + 
create_sp3_resp = self.payloads_data.get("create_sp3_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + get_sn2_att_status = self.payloads_data.get("get_sn2_att_status") self.run_dcnm_send.side_effect = [ - get_snt_resp1, get_snt_resp2, get_snt_resp3, - have_sp1_resp, have_sp2_resp, have_sp3_resp, - get_sn1_att_status, - create_sp2_resp, create_sp3_resp, - deploy_sp2_sp3_resp, - get_sn2_att_status, - get_sn2_att_status - ] + get_snt_resp1, + get_snt_resp2, + get_snt_resp3, + have_sp1_resp, + have_sp2_resp, + have_sp3_resp, + get_sn1_att_status, + create_sp2_resp, + create_sp3_resp, + deploy_sp2_sp3_resp, + get_sn2_att_status, + get_sn2_att_status, + ] pass - if ('test_dcnm_sp_merged_update_existing' == self._testMethodName): + if "test_dcnm_sp_merged_update_existing" == self._testMethodName: pass - if ('test_dcnm_sp_delete_existing_no_config' == self._testMethodName): - - get_snodes_resp = self.payloads_data.get('get_service_nodes_resp') - get_policy_with_sn1 = self.payloads_data.get('get_policy_with_sn1') - get_policy_with_sn2 = self.payloads_data.get('get_policy_with_sn2') - det_sp1_resp = self.payloads_data.get('detach_sp1_resp') - det_sp2_sp3_resp = self.payloads_data.get('detach_sp2_sp3_resp') - delete_sp1_resp = self.payloads_data.get('delete_sp1_resp') - delete_sp2_resp = self.payloads_data.get('delete_sp2_resp') - delete_sp3_resp = self.payloads_data.get('delete_sp3_resp') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - get_dd_sn1_att_status = self.payloads_data.get('get_dd_sn1_att_status') - get_dd_sn2_att_status = self.payloads_data.get('get_dd_sn2_att_status') + if "test_dcnm_sp_delete_existing_no_config" == self._testMethodName: + + get_snodes_resp = self.payloads_data.get("get_service_nodes_resp") + get_policy_with_sn1 = self.payloads_data.get("get_policy_with_sn1") + 
get_policy_with_sn2 = self.payloads_data.get("get_policy_with_sn2") + det_sp1_resp = self.payloads_data.get("detach_sp1_resp") + det_sp2_sp3_resp = self.payloads_data.get("detach_sp2_sp3_resp") + delete_sp1_resp = self.payloads_data.get("delete_sp1_resp") + delete_sp2_resp = self.payloads_data.get("delete_sp2_resp") + delete_sp3_resp = self.payloads_data.get("delete_sp3_resp") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + get_dd_sn1_att_status = self.payloads_data.get("get_dd_sn1_att_status") + get_dd_sn2_att_status = self.payloads_data.get("get_dd_sn2_att_status") self.run_dcnm_send.side_effect = [ - get_snodes_resp, - get_policy_with_sn1, get_policy_with_sn2, - det_sp1_resp, det_sp2_sp3_resp, - deploy_sp1_resp, deploy_sp2_sp3_resp, - get_dd_sn1_att_status, - get_dd_sn2_att_status, - get_dd_sn2_att_status, - delete_sp1_resp, delete_sp2_resp, - delete_sp3_resp - ] - - if ('test_dcnm_sp_delete_existing_with_node_names' == self._testMethodName): - - get_policy_with_sn1 = self.payloads_data.get('get_policy_with_sn1') - get_policy_with_sn2 = self.payloads_data.get('get_policy_with_sn2') - det_sp1_resp = self.payloads_data.get('detach_sp1_resp') - det_sp2_sp3_resp = self.payloads_data.get('detach_sp2_sp3_resp') - delete_sp1_resp = self.payloads_data.get('delete_sp1_resp') - delete_sp2_resp = self.payloads_data.get('delete_sp2_resp') - delete_sp3_resp = self.payloads_data.get('delete_sp3_resp') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - get_dd_sn1_att_status = self.payloads_data.get('get_dd_sn1_att_status') - get_dd_sn2_att_status = self.payloads_data.get('get_dd_sn2_att_status') + get_snodes_resp, + get_policy_with_sn1, + get_policy_with_sn2, + det_sp1_resp, + det_sp2_sp3_resp, + deploy_sp1_resp, + deploy_sp2_sp3_resp, + get_dd_sn1_att_status, + get_dd_sn2_att_status, + get_dd_sn2_att_status, + 
delete_sp1_resp, + delete_sp2_resp, + delete_sp3_resp, + ] + + if "test_dcnm_sp_delete_existing_with_node_names" == self._testMethodName: + + get_policy_with_sn1 = self.payloads_data.get("get_policy_with_sn1") + get_policy_with_sn2 = self.payloads_data.get("get_policy_with_sn2") + det_sp1_resp = self.payloads_data.get("detach_sp1_resp") + det_sp2_sp3_resp = self.payloads_data.get("detach_sp2_sp3_resp") + delete_sp1_resp = self.payloads_data.get("delete_sp1_resp") + delete_sp2_resp = self.payloads_data.get("delete_sp2_resp") + delete_sp3_resp = self.payloads_data.get("delete_sp3_resp") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + get_dd_sn1_att_status = self.payloads_data.get("get_dd_sn1_att_status") + get_dd_sn2_att_status = self.payloads_data.get("get_dd_sn2_att_status") self.run_dcnm_send.side_effect = [ - get_policy_with_sn1, get_policy_with_sn2, - det_sp1_resp, det_sp2_sp3_resp, - deploy_sp1_resp, deploy_sp2_sp3_resp, - get_dd_sn1_att_status, - get_dd_sn2_att_status, - get_dd_sn2_att_status, - delete_sp1_resp, delete_sp2_resp, - delete_sp3_resp - ] - - if ('test_dcnm_sp_delete_existing_with_node_name_and_policy_name' == self._testMethodName): - - have_sp1_resp = self.payloads_data.get('get_sp1_resp') - have_sp2_resp = self.payloads_data.get('get_sp2_resp') - have_sp3_resp = self.payloads_data.get('get_sp3_resp') - det_sp1_resp = self.payloads_data.get('detach_sp1_resp') - det_sp2_sp3_resp = self.payloads_data.get('detach_sp2_sp3_resp') - delete_sp1_resp = self.payloads_data.get('delete_sp1_resp') - delete_sp2_resp = self.payloads_data.get('delete_sp2_resp') - delete_sp3_resp = self.payloads_data.get('delete_sp3_resp') - get_dd_sn1_att_status = self.payloads_data.get('get_dd_sn1_att_status') - get_dd_sn2_att_status = self.payloads_data.get('get_dd_sn2_att_status') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = 
self.payloads_data.get('deploy_sp2_sp3_resp') + get_policy_with_sn1, + get_policy_with_sn2, + det_sp1_resp, + det_sp2_sp3_resp, + deploy_sp1_resp, + deploy_sp2_sp3_resp, + get_dd_sn1_att_status, + get_dd_sn2_att_status, + get_dd_sn2_att_status, + delete_sp1_resp, + delete_sp2_resp, + delete_sp3_resp, + ] + + if ( + "test_dcnm_sp_delete_existing_with_node_name_and_policy_name" + == self._testMethodName + ): + + have_sp1_resp = self.payloads_data.get("get_sp1_resp") + have_sp2_resp = self.payloads_data.get("get_sp2_resp") + have_sp3_resp = self.payloads_data.get("get_sp3_resp") + det_sp1_resp = self.payloads_data.get("detach_sp1_resp") + det_sp2_sp3_resp = self.payloads_data.get("detach_sp2_sp3_resp") + delete_sp1_resp = self.payloads_data.get("delete_sp1_resp") + delete_sp2_resp = self.payloads_data.get("delete_sp2_resp") + delete_sp3_resp = self.payloads_data.get("delete_sp3_resp") + get_dd_sn1_att_status = self.payloads_data.get("get_dd_sn1_att_status") + get_dd_sn2_att_status = self.payloads_data.get("get_dd_sn2_att_status") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") self.run_dcnm_send.side_effect = [ - have_sp1_resp, have_sp2_resp, have_sp3_resp, - det_sp1_resp, det_sp2_sp3_resp, - deploy_sp1_resp, deploy_sp2_sp3_resp, - get_dd_sn1_att_status, - get_dd_sn2_att_status, - get_dd_sn2_att_status, - delete_sp1_resp, delete_sp2_resp, - delete_sp3_resp - ] - - if ('test_dcnm_sp_delete_existing_with_node_name_and_rp_name' == self._testMethodName): - - get_policy_with_sn1 = self.payloads_data.get('get_policy_with_sn1') - get_policy_with_sn2 = self.payloads_data.get('get_policy_with_sn2') - det_sp1_resp = self.payloads_data.get('detach_sp1_resp') - det_sp2_sp3_resp = self.payloads_data.get('detach_sp2_sp3_resp') - delete_sp1_resp = self.payloads_data.get('delete_sp1_resp') - delete_sp2_resp = self.payloads_data.get('delete_sp2_resp') - delete_sp3_resp = 
self.payloads_data.get('delete_sp3_resp') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - get_dd_sn1_att_status = self.payloads_data.get('get_dd_sn1_att_status') - get_dd_sn2_att_status = self.payloads_data.get('get_dd_sn2_att_status') + have_sp1_resp, + have_sp2_resp, + have_sp3_resp, + det_sp1_resp, + det_sp2_sp3_resp, + deploy_sp1_resp, + deploy_sp2_sp3_resp, + get_dd_sn1_att_status, + get_dd_sn2_att_status, + get_dd_sn2_att_status, + delete_sp1_resp, + delete_sp2_resp, + delete_sp3_resp, + ] + + if ( + "test_dcnm_sp_delete_existing_with_node_name_and_rp_name" + == self._testMethodName + ): + + get_policy_with_sn1 = self.payloads_data.get("get_policy_with_sn1") + get_policy_with_sn2 = self.payloads_data.get("get_policy_with_sn2") + det_sp1_resp = self.payloads_data.get("detach_sp1_resp") + det_sp2_sp3_resp = self.payloads_data.get("detach_sp2_sp3_resp") + delete_sp1_resp = self.payloads_data.get("delete_sp1_resp") + delete_sp2_resp = self.payloads_data.get("delete_sp2_resp") + delete_sp3_resp = self.payloads_data.get("delete_sp3_resp") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + get_dd_sn1_att_status = self.payloads_data.get("get_dd_sn1_att_status") + get_dd_sn2_att_status = self.payloads_data.get("get_dd_sn2_att_status") self.run_dcnm_send.side_effect = [ - get_policy_with_sn1, get_policy_with_sn2, - det_sp1_resp, det_sp2_sp3_resp, - deploy_sp1_resp, deploy_sp2_sp3_resp, - get_dd_sn1_att_status, - get_dd_sn2_att_status, - delete_sp1_resp, delete_sp2_resp - ] - - if ('test_dcnm_sp_delete_existing_detach_unauth_err' == self._testMethodName): - - have_sp1_resp = self.payloads_data.get('get_sp1_resp') - have_sp2_resp = self.payloads_data.get('get_sp2_resp') - have_sp3_resp = self.payloads_data.get('get_sp3_resp') - det_sp1_resp = self.payloads_data.get('detach_sp1_resp') - det_sp2_sp3_resp = 
self.payloads_data.get('detach_sp2_sp3_resp') - delete_sp1_resp = self.payloads_data.get('delete_sp1_resp') - delete_sp2_resp = self.payloads_data.get('delete_sp2_resp') - delete_sp3_resp = self.payloads_data.get('delete_sp3_resp') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - resp_unauth_err = self.payloads_data.get('resp_unauth_err') - get_dd_sn1_att_status = self.payloads_data.get('get_dd_sn1_att_status') - get_dd_sn2_att_status = self.payloads_data.get('get_dd_sn2_att_status') + get_policy_with_sn1, + get_policy_with_sn2, + det_sp1_resp, + det_sp2_sp3_resp, + deploy_sp1_resp, + deploy_sp2_sp3_resp, + get_dd_sn1_att_status, + get_dd_sn2_att_status, + delete_sp1_resp, + delete_sp2_resp, + ] + + if "test_dcnm_sp_delete_existing_detach_unauth_err" == self._testMethodName: + + have_sp1_resp = self.payloads_data.get("get_sp1_resp") + have_sp2_resp = self.payloads_data.get("get_sp2_resp") + have_sp3_resp = self.payloads_data.get("get_sp3_resp") + det_sp1_resp = self.payloads_data.get("detach_sp1_resp") + det_sp2_sp3_resp = self.payloads_data.get("detach_sp2_sp3_resp") + delete_sp1_resp = self.payloads_data.get("delete_sp1_resp") + delete_sp2_resp = self.payloads_data.get("delete_sp2_resp") + delete_sp3_resp = self.payloads_data.get("delete_sp3_resp") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + resp_unauth_err = self.payloads_data.get("resp_unauth_err") + get_dd_sn1_att_status = self.payloads_data.get("get_dd_sn1_att_status") + get_dd_sn2_att_status = self.payloads_data.get("get_dd_sn2_att_status") self.run_dcnm_send.side_effect = [ - have_sp1_resp, have_sp2_resp, have_sp3_resp, - resp_unauth_err, det_sp1_resp, det_sp2_sp3_resp, - deploy_sp1_resp, deploy_sp2_sp3_resp, - get_dd_sn1_att_status, - get_dd_sn2_att_status, - get_dd_sn2_att_status, - delete_sp1_resp, delete_sp2_resp, - delete_sp3_resp 
- ] - - if ('test_dcnm_sp_delete_existing_delete_deploy_unauth_err' == self._testMethodName): - - have_sp1_resp = self.payloads_data.get('get_sp1_resp') - have_sp2_resp = self.payloads_data.get('get_sp2_resp') - have_sp3_resp = self.payloads_data.get('get_sp3_resp') - det_sp1_resp = self.payloads_data.get('detach_sp1_resp') - det_sp2_sp3_resp = self.payloads_data.get('detach_sp2_sp3_resp') - delete_sp1_resp = self.payloads_data.get('delete_sp1_resp') - delete_sp2_resp = self.payloads_data.get('delete_sp2_resp') - delete_sp3_resp = self.payloads_data.get('delete_sp3_resp') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - get_dd_sn1_att_status = self.payloads_data.get('get_dd_sn1_att_status') - get_dd_sn2_att_status = self.payloads_data.get('get_dd_sn2_att_status') - resp_unauth_err = self.payloads_data.get('resp_unauth_err') + have_sp1_resp, + have_sp2_resp, + have_sp3_resp, + resp_unauth_err, + det_sp1_resp, + det_sp2_sp3_resp, + deploy_sp1_resp, + deploy_sp2_sp3_resp, + get_dd_sn1_att_status, + get_dd_sn2_att_status, + get_dd_sn2_att_status, + delete_sp1_resp, + delete_sp2_resp, + delete_sp3_resp, + ] + + if ( + "test_dcnm_sp_delete_existing_delete_deploy_unauth_err" + == self._testMethodName + ): + + have_sp1_resp = self.payloads_data.get("get_sp1_resp") + have_sp2_resp = self.payloads_data.get("get_sp2_resp") + have_sp3_resp = self.payloads_data.get("get_sp3_resp") + det_sp1_resp = self.payloads_data.get("detach_sp1_resp") + det_sp2_sp3_resp = self.payloads_data.get("detach_sp2_sp3_resp") + delete_sp1_resp = self.payloads_data.get("delete_sp1_resp") + delete_sp2_resp = self.payloads_data.get("delete_sp2_resp") + delete_sp3_resp = self.payloads_data.get("delete_sp3_resp") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + get_dd_sn1_att_status = self.payloads_data.get("get_dd_sn1_att_status") + 
get_dd_sn2_att_status = self.payloads_data.get("get_dd_sn2_att_status") + resp_unauth_err = self.payloads_data.get("resp_unauth_err") self.run_dcnm_send.side_effect = [ - have_sp1_resp, have_sp2_resp, have_sp3_resp, - det_sp1_resp, det_sp2_sp3_resp, - resp_unauth_err, deploy_sp1_resp, deploy_sp2_sp3_resp, - get_dd_sn1_att_status, - get_dd_sn2_att_status, - get_dd_sn2_att_status, - delete_sp1_resp, delete_sp2_resp, - delete_sp3_resp - ] - - if ('test_dcnm_sp_delete_existing_delete_unauth_err' == self._testMethodName): - - have_sp1_resp = self.payloads_data.get('get_sp1_resp') - have_sp2_resp = self.payloads_data.get('get_sp2_resp') - have_sp3_resp = self.payloads_data.get('get_sp3_resp') - det_sp1_resp = self.payloads_data.get('detach_sp1_resp') - det_sp2_sp3_resp = self.payloads_data.get('detach_sp2_sp3_resp') - delete_sp1_resp = self.payloads_data.get('delete_sp1_resp') - delete_sp2_resp = self.payloads_data.get('delete_sp2_resp') - delete_sp3_resp = self.payloads_data.get('delete_sp3_resp') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - resp_unauth_err = self.payloads_data.get('resp_unauth_err') - get_dd_sn1_att_status = self.payloads_data.get('get_dd_sn1_att_status') - get_dd_sn2_att_status = self.payloads_data.get('get_dd_sn2_att_status') + have_sp1_resp, + have_sp2_resp, + have_sp3_resp, + det_sp1_resp, + det_sp2_sp3_resp, + resp_unauth_err, + deploy_sp1_resp, + deploy_sp2_sp3_resp, + get_dd_sn1_att_status, + get_dd_sn2_att_status, + get_dd_sn2_att_status, + delete_sp1_resp, + delete_sp2_resp, + delete_sp3_resp, + ] + + if "test_dcnm_sp_delete_existing_delete_unauth_err" == self._testMethodName: + + have_sp1_resp = self.payloads_data.get("get_sp1_resp") + have_sp2_resp = self.payloads_data.get("get_sp2_resp") + have_sp3_resp = self.payloads_data.get("get_sp3_resp") + det_sp1_resp = self.payloads_data.get("detach_sp1_resp") + det_sp2_sp3_resp = 
self.payloads_data.get("detach_sp2_sp3_resp") + delete_sp1_resp = self.payloads_data.get("delete_sp1_resp") + delete_sp2_resp = self.payloads_data.get("delete_sp2_resp") + delete_sp3_resp = self.payloads_data.get("delete_sp3_resp") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + resp_unauth_err = self.payloads_data.get("resp_unauth_err") + get_dd_sn1_att_status = self.payloads_data.get("get_dd_sn1_att_status") + get_dd_sn2_att_status = self.payloads_data.get("get_dd_sn2_att_status") self.run_dcnm_send.side_effect = [ - have_sp1_resp, have_sp2_resp, have_sp3_resp, - det_sp1_resp, det_sp2_sp3_resp, - deploy_sp1_resp, deploy_sp2_sp3_resp, - get_dd_sn1_att_status, - get_dd_sn2_att_status, - get_dd_sn2_att_status, - resp_unauth_err, delete_sp1_resp, delete_sp2_resp, - delete_sp3_resp - ] - - if ('test_dcnm_sp_delete_existing_and_non_existing' == self._testMethodName): - - have_sp1_resp = [] - have_sp2_resp = self.payloads_data.get('get_sp2_resp') - have_sp3_resp = self.payloads_data.get('get_sp3_resp') - det_sp2_sp3_resp = self.payloads_data.get('detach_sp2_sp3_resp') - delete_sp2_resp = self.payloads_data.get('delete_sp2_resp') - delete_sp3_resp = self.payloads_data.get('delete_sp3_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - get_dd_sn2_att_status = self.payloads_data.get('get_dd_sn2_att_status') + have_sp1_resp, + have_sp2_resp, + have_sp3_resp, + det_sp1_resp, + det_sp2_sp3_resp, + deploy_sp1_resp, + deploy_sp2_sp3_resp, + get_dd_sn1_att_status, + get_dd_sn2_att_status, + get_dd_sn2_att_status, + resp_unauth_err, + delete_sp1_resp, + delete_sp2_resp, + delete_sp3_resp, + ] + + if "test_dcnm_sp_delete_existing_and_non_existing" == self._testMethodName: + + have_sp1_resp = [] + have_sp2_resp = self.payloads_data.get("get_sp2_resp") + have_sp3_resp = self.payloads_data.get("get_sp3_resp") + det_sp2_sp3_resp = 
self.payloads_data.get("detach_sp2_sp3_resp") + delete_sp2_resp = self.payloads_data.get("delete_sp2_resp") + delete_sp3_resp = self.payloads_data.get("delete_sp3_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + get_dd_sn2_att_status = self.payloads_data.get("get_dd_sn2_att_status") self.run_dcnm_send.side_effect = [ - have_sp1_resp, have_sp2_resp, have_sp3_resp, - det_sp2_sp3_resp, - deploy_sp2_sp3_resp, - get_dd_sn2_att_status, - get_dd_sn2_att_status, - delete_sp2_resp, - delete_sp3_resp - ] - - if ('test_dcnm_sp_delete_non_existing' == self._testMethodName): + have_sp1_resp, + have_sp2_resp, + have_sp3_resp, + det_sp2_sp3_resp, + deploy_sp2_sp3_resp, + get_dd_sn2_att_status, + get_dd_sn2_att_status, + delete_sp2_resp, + delete_sp3_resp, + ] + + if "test_dcnm_sp_delete_non_existing" == self._testMethodName: self.run_dcnm_send.side_effect = [[], [], [], [], [], [], []] - if ('test_dcnm_sp_replace_sp1_to_sp3_non_existing' == self._testMethodName): - - have_sp1_resp = [] - have_sp2_resp = [] - have_sp3_resp = [] - get_sp1_resp = self.payloads_data.get('get_sp1_resp') - get_snt_resp1 = self.payloads_data.get('get_snt1_response') - get_snt_resp2 = self.payloads_data.get('get_snt2_response') - get_snt_resp3 = self.payloads_data.get('get_snt2_response') - create_sp1_resp = self.payloads_data.get('create_sp1_resp') - create_sp2_resp = self.payloads_data.get('create_sp2_resp') - create_sp3_resp = self.payloads_data.get('create_sp3_resp') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - resp_unauth_err = self.payloads_data.get('resp_unauth_err') - get_sn1_att_status = self.payloads_data.get('get_sn1_att_status') - get_sn2_att_status = self.payloads_data.get('get_sn2_att_status') + if "test_dcnm_sp_replace_sp1_to_sp3_non_existing" == self._testMethodName: + + have_sp1_resp = [] + have_sp2_resp = [] + have_sp3_resp = [] + get_sp1_resp = 
self.payloads_data.get("get_sp1_resp") + get_snt_resp1 = self.payloads_data.get("get_snt1_response") + get_snt_resp2 = self.payloads_data.get("get_snt2_response") + get_snt_resp3 = self.payloads_data.get("get_snt2_response") + create_sp1_resp = self.payloads_data.get("create_sp1_resp") + create_sp2_resp = self.payloads_data.get("create_sp2_resp") + create_sp3_resp = self.payloads_data.get("create_sp3_resp") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + resp_unauth_err = self.payloads_data.get("resp_unauth_err") + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + get_sn2_att_status = self.payloads_data.get("get_sn2_att_status") self.run_dcnm_send.side_effect = [ - get_snt_resp1, get_snt_resp2, get_snt_resp3, - have_sp1_resp, have_sp2_resp, have_sp3_resp, - resp_unauth_err, get_sp1_resp, - create_sp1_resp, create_sp2_resp, create_sp3_resp, - resp_unauth_err, - deploy_sp1_resp, deploy_sp2_sp3_resp , - get_sn1_att_status, - get_sn2_att_status, - get_sn2_att_status - ] - - if ('test_dcnm_sp_replace_sp1_to_sp3_existing' == self._testMethodName): - - have_sp1_resp = self.payloads_data.get('get_sp1_resp') - have_sp2_resp = self.payloads_data.get('get_sp2_resp') - have_sp3_resp = self.payloads_data.get('get_sp3_resp') - get_snt_resp1 = self.payloads_data.get('get_snt1_response') - get_snt_resp2 = self.payloads_data.get('get_snt2_response') - get_snt_resp3 = self.payloads_data.get('get_snt2_response') - get_sn1_att_status = self.payloads_data.get('get_sn1_att_status') - get_sn2_att_status = self.payloads_data.get('get_sn2_att_status') - create_sp1_resp = self.payloads_data.get('create_sp1_resp') - create_sp2_resp = self.payloads_data.get('create_sp2_resp') - create_sp3_resp = self.payloads_data.get('create_sp3_resp') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - get_sn1_att_status = 
self.payloads_data.get('get_sn1_att_status') - get_sn2_att_status = self.payloads_data.get('get_sn2_att_status') + get_snt_resp1, + get_snt_resp2, + get_snt_resp3, + have_sp1_resp, + have_sp2_resp, + have_sp3_resp, + resp_unauth_err, + get_sp1_resp, + create_sp1_resp, + create_sp2_resp, + create_sp3_resp, + resp_unauth_err, + deploy_sp1_resp, + deploy_sp2_sp3_resp, + get_sn1_att_status, + get_sn2_att_status, + get_sn2_att_status, + ] + + if "test_dcnm_sp_replace_sp1_to_sp3_existing" == self._testMethodName: + + have_sp1_resp = self.payloads_data.get("get_sp1_resp") + have_sp2_resp = self.payloads_data.get("get_sp2_resp") + have_sp3_resp = self.payloads_data.get("get_sp3_resp") + get_snt_resp1 = self.payloads_data.get("get_snt1_response") + get_snt_resp2 = self.payloads_data.get("get_snt2_response") + get_snt_resp3 = self.payloads_data.get("get_snt2_response") + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + get_sn2_att_status = self.payloads_data.get("get_sn2_att_status") + create_sp1_resp = self.payloads_data.get("create_sp1_resp") + create_sp2_resp = self.payloads_data.get("create_sp2_resp") + create_sp3_resp = self.payloads_data.get("create_sp3_resp") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + get_sn2_att_status = self.payloads_data.get("get_sn2_att_status") self.run_dcnm_send.side_effect = [ - get_snt_resp1, get_snt_resp2, get_snt_resp3, - have_sp1_resp, have_sp2_resp, have_sp3_resp, - get_sn1_att_status, get_sn2_att_status, - create_sp1_resp, create_sp2_resp, create_sp3_resp, - deploy_sp1_resp, deploy_sp2_sp3_resp, - get_sn1_att_status, - get_sn2_att_status, - get_sn2_att_status - ] - - if ('test_dcnm_sp_replace_sp1_to_sp3_existing_no_change' == self._testMethodName): - - have_sp1_resp = self.payloads_data.get('get_sp1_resp') - have_sp2_resp = self.payloads_data.get('get_sp2_resp') - 
have_sp3_resp = self.payloads_data.get('get_sp3_resp') - get_sn1_att_status = self.payloads_data.get('get_sn1_att_status') - get_sn2_att_status = self.payloads_data.get('get_sn2_att_status') - get_snt_resp1 = self.payloads_data.get('get_snt1_response') - get_snt_resp2 = self.payloads_data.get('get_snt2_response') - get_snt_resp3 = self.payloads_data.get('get_snt2_response') + get_snt_resp1, + get_snt_resp2, + get_snt_resp3, + have_sp1_resp, + have_sp2_resp, + have_sp3_resp, + get_sn1_att_status, + get_sn2_att_status, + create_sp1_resp, + create_sp2_resp, + create_sp3_resp, + deploy_sp1_resp, + deploy_sp2_sp3_resp, + get_sn1_att_status, + get_sn2_att_status, + get_sn2_att_status, + ] + + if "test_dcnm_sp_replace_sp1_to_sp3_existing_no_change" == self._testMethodName: + + have_sp1_resp = self.payloads_data.get("get_sp1_resp") + have_sp2_resp = self.payloads_data.get("get_sp2_resp") + have_sp3_resp = self.payloads_data.get("get_sp3_resp") + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + get_sn2_att_status = self.payloads_data.get("get_sn2_att_status") + get_snt_resp1 = self.payloads_data.get("get_snt1_response") + get_snt_resp2 = self.payloads_data.get("get_snt2_response") + get_snt_resp3 = self.payloads_data.get("get_snt2_response") self.run_dcnm_send.side_effect = [ - get_snt_resp1, get_snt_resp2, get_snt_resp3, - have_sp1_resp, have_sp2_resp, have_sp3_resp, - get_sn1_att_status, get_sn2_att_status - ] - - if ('test_dcnm_sp_override_with_new_peerings' == self._testMethodName): - - have_sp1_resp = [] - get_snodes_resp = self.payloads_data.get('get_service_nodes_resp') - get_policy_with_sn1 = self.payloads_data.get('get_policy_with_sn1') - get_policy_with_sn2 = self.payloads_data.get('get_policy_with_sn2') - get_snt_resp1 = self.payloads_data.get('get_snt1_response') - create_sp1_resp = self.payloads_data.get('create_sp1_resp') - det_sp2_sp3_resp = self.payloads_data.get('detach_sp2_sp3_resp') - delete_sp2_resp = 
self.payloads_data.get('delete_sp2_resp') - delete_sp3_resp = self.payloads_data.get('delete_sp3_resp') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - get_sn1_att_status = self.payloads_data.get('get_sn1_att_status') - get_sn2_att_status = self.payloads_data.get('get_sn2_att_status') - get_dd_sn1_att_status = self.payloads_data.get('get_dd_sn1_att_status') - get_dd_sn2_att_status = self.payloads_data.get('get_dd_sn2_att_status') + get_snt_resp1, + get_snt_resp2, + get_snt_resp3, + have_sp1_resp, + have_sp2_resp, + have_sp3_resp, + get_sn1_att_status, + get_sn2_att_status, + ] + + if "test_dcnm_sp_override_with_new_peerings" == self._testMethodName: + + have_sp1_resp = [] + get_snodes_resp = self.payloads_data.get("get_service_nodes_resp") + get_policy_with_sn1 = self.payloads_data.get("get_policy_with_sn1") + get_policy_with_sn2 = self.payloads_data.get("get_policy_with_sn2") + get_snt_resp1 = self.payloads_data.get("get_snt1_response") + create_sp1_resp = self.payloads_data.get("create_sp1_resp") + det_sp2_sp3_resp = self.payloads_data.get("detach_sp2_sp3_resp") + delete_sp2_resp = self.payloads_data.get("delete_sp2_resp") + delete_sp3_resp = self.payloads_data.get("delete_sp3_resp") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + get_sn2_att_status = self.payloads_data.get("get_sn2_att_status") + get_dd_sn1_att_status = self.payloads_data.get("get_dd_sn1_att_status") + get_dd_sn2_att_status = self.payloads_data.get("get_dd_sn2_att_status") self.run_dcnm_send.side_effect = [ - get_snt_resp1, - have_sp1_resp, - get_snodes_resp, - get_policy_with_sn1, get_policy_with_sn2, - create_sp1_resp, - det_sp2_sp3_resp, - deploy_sp2_sp3_resp, - get_dd_sn2_att_status, - get_dd_sn2_att_status, - delete_sp2_resp, delete_sp3_resp, - 
deploy_sp1_resp, - get_sn1_att_status, - ] - - if ('test_dcnm_sp_override_with_existing_peering' == self._testMethodName): - - get_sn1_att_status = self.payloads_data.get('get_sn1_att_status') - get_sn2_att_status = self.payloads_data.get('get_sn2_att_status') - have_sp1_resp = self.payloads_data.get('get_sp1_resp') - get_sn1_att_status = self.payloads_data.get('get_sn1_att_status') - get_snodes_resp = self.payloads_data.get('get_service_nodes_resp') - get_policy_with_sn1 = self.payloads_data.get('get_policy_with_sn1') - get_policy_with_sn2 = self.payloads_data.get('get_policy_with_sn2') - get_snt_resp1 = self.payloads_data.get('get_snt1_response') - det_sp2_sp3_resp = self.payloads_data.get('detach_sp2_sp3_resp') - delete_sp2_resp = self.payloads_data.get('delete_sp2_resp') - delete_sp3_resp = self.payloads_data.get('delete_sp3_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - get_dd_sn1_att_status = self.payloads_data.get('get_dd_sn1_att_status') - get_dd_sn2_att_status = self.payloads_data.get('get_dd_sn2_att_status') + get_snt_resp1, + have_sp1_resp, + get_snodes_resp, + get_policy_with_sn1, + get_policy_with_sn2, + create_sp1_resp, + det_sp2_sp3_resp, + deploy_sp2_sp3_resp, + get_dd_sn2_att_status, + get_dd_sn2_att_status, + delete_sp2_resp, + delete_sp3_resp, + deploy_sp1_resp, + get_sn1_att_status, + ] + + if "test_dcnm_sp_override_with_existing_peering" == self._testMethodName: + + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + get_sn2_att_status = self.payloads_data.get("get_sn2_att_status") + have_sp1_resp = self.payloads_data.get("get_sp1_resp") + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + get_snodes_resp = self.payloads_data.get("get_service_nodes_resp") + get_policy_with_sn1 = self.payloads_data.get("get_policy_with_sn1") + get_policy_with_sn2 = self.payloads_data.get("get_policy_with_sn2") + get_snt_resp1 = self.payloads_data.get("get_snt1_response") + det_sp2_sp3_resp = 
self.payloads_data.get("detach_sp2_sp3_resp") + delete_sp2_resp = self.payloads_data.get("delete_sp2_resp") + delete_sp3_resp = self.payloads_data.get("delete_sp3_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + get_dd_sn1_att_status = self.payloads_data.get("get_dd_sn1_att_status") + get_dd_sn2_att_status = self.payloads_data.get("get_dd_sn2_att_status") self.run_dcnm_send.side_effect = [ - get_snt_resp1, - have_sp1_resp, - get_snodes_resp, - get_policy_with_sn1, get_policy_with_sn2, - get_sn1_att_status, - det_sp2_sp3_resp, - deploy_sp2_sp3_resp, - get_dd_sn2_att_status, - get_dd_sn2_att_status, - delete_sp2_resp, delete_sp3_resp - ] - - if ('test_dcnm_sp_override_with_existing_peering_updated' == self._testMethodName): - - have_sp1_resp = self.payloads_data.get('get_sp1_resp') - get_snodes_resp = self.payloads_data.get('get_service_nodes_resp') - get_policy_with_sn1 = self.payloads_data.get('get_policy_with_sn1') - get_policy_with_sn2 = self.payloads_data.get('get_policy_with_sn2') - get_snt_resp1 = self.payloads_data.get('get_snt1_response') - get_sn1_att_status = self.payloads_data.get('get_sn1_att_status') - create_sp1_resp = self.payloads_data.get('create_sp1_resp') - det_sp2_sp3_resp = self.payloads_data.get('detach_sp2_sp3_resp') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - delete_sp2_resp = self.payloads_data.get('delete_sp2_resp') - delete_sp3_resp = self.payloads_data.get('delete_sp3_resp') - get_sn1_att_status = self.payloads_data.get('get_sn1_att_status') - get_sn2_att_status = self.payloads_data.get('get_sn2_att_status') - get_dd_sn1_att_status = self.payloads_data.get('get_dd_sn1_att_status') - get_dd_sn2_att_status = self.payloads_data.get('get_dd_sn2_att_status') + get_snt_resp1, + have_sp1_resp, + get_snodes_resp, + get_policy_with_sn1, + get_policy_with_sn2, + get_sn1_att_status, + det_sp2_sp3_resp, + deploy_sp2_sp3_resp, + 
get_dd_sn2_att_status, + get_dd_sn2_att_status, + delete_sp2_resp, + delete_sp3_resp, + ] + + if ( + "test_dcnm_sp_override_with_existing_peering_updated" + == self._testMethodName + ): + + have_sp1_resp = self.payloads_data.get("get_sp1_resp") + get_snodes_resp = self.payloads_data.get("get_service_nodes_resp") + get_policy_with_sn1 = self.payloads_data.get("get_policy_with_sn1") + get_policy_with_sn2 = self.payloads_data.get("get_policy_with_sn2") + get_snt_resp1 = self.payloads_data.get("get_snt1_response") + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + create_sp1_resp = self.payloads_data.get("create_sp1_resp") + det_sp2_sp3_resp = self.payloads_data.get("detach_sp2_sp3_resp") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + delete_sp2_resp = self.payloads_data.get("delete_sp2_resp") + delete_sp3_resp = self.payloads_data.get("delete_sp3_resp") + get_sn1_att_status = self.payloads_data.get("get_sn1_att_status") + get_sn2_att_status = self.payloads_data.get("get_sn2_att_status") + get_dd_sn1_att_status = self.payloads_data.get("get_dd_sn1_att_status") + get_dd_sn2_att_status = self.payloads_data.get("get_dd_sn2_att_status") self.run_dcnm_send.side_effect = [ - get_snt_resp1, - have_sp1_resp, - get_snodes_resp, - get_policy_with_sn1, get_policy_with_sn2, - get_sn1_att_status, - create_sp1_resp, - det_sp2_sp3_resp, - deploy_sp2_sp3_resp, - get_dd_sn2_att_status, - get_dd_sn2_att_status, - delete_sp2_resp, delete_sp3_resp, - deploy_sp1_resp, - get_sn1_att_status - ] - - if ('test_dcnm_sp_override_with_no_config' == self._testMethodName): - - get_snodes_resp = self.payloads_data.get('get_service_nodes_resp') - get_policy_with_sn1 = self.payloads_data.get('get_policy_with_sn1') - get_policy_with_sn2 = self.payloads_data.get('get_policy_with_sn2') - det_sp1_resp = self.payloads_data.get('detach_sp1_resp') - det_sp2_sp3_resp = 
self.payloads_data.get('detach_sp2_sp3_resp') - deploy_sp1_resp = self.payloads_data.get('deploy_sp1_resp') - deploy_sp2_sp3_resp = self.payloads_data.get('deploy_sp2_sp3_resp') - delete_sp1_resp = self.payloads_data.get('delete_sp1_resp') - delete_sp2_resp = self.payloads_data.get('delete_sp2_resp') - delete_sp3_resp = self.payloads_data.get('delete_sp3_resp') - get_dd_sn1_att_status = self.payloads_data.get('get_dd_sn1_att_status') - get_dd_sn2_att_status = self.payloads_data.get('get_dd_sn2_att_status') + get_snt_resp1, + have_sp1_resp, + get_snodes_resp, + get_policy_with_sn1, + get_policy_with_sn2, + get_sn1_att_status, + create_sp1_resp, + det_sp2_sp3_resp, + deploy_sp2_sp3_resp, + get_dd_sn2_att_status, + get_dd_sn2_att_status, + delete_sp2_resp, + delete_sp3_resp, + deploy_sp1_resp, + get_sn1_att_status, + ] + + if "test_dcnm_sp_override_with_no_config" == self._testMethodName: + + get_snodes_resp = self.payloads_data.get("get_service_nodes_resp") + get_policy_with_sn1 = self.payloads_data.get("get_policy_with_sn1") + get_policy_with_sn2 = self.payloads_data.get("get_policy_with_sn2") + det_sp1_resp = self.payloads_data.get("detach_sp1_resp") + det_sp2_sp3_resp = self.payloads_data.get("detach_sp2_sp3_resp") + deploy_sp1_resp = self.payloads_data.get("deploy_sp1_resp") + deploy_sp2_sp3_resp = self.payloads_data.get("deploy_sp2_sp3_resp") + delete_sp1_resp = self.payloads_data.get("delete_sp1_resp") + delete_sp2_resp = self.payloads_data.get("delete_sp2_resp") + delete_sp3_resp = self.payloads_data.get("delete_sp3_resp") + get_dd_sn1_att_status = self.payloads_data.get("get_dd_sn1_att_status") + get_dd_sn2_att_status = self.payloads_data.get("get_dd_sn2_att_status") self.run_dcnm_send.side_effect = [ - get_snodes_resp, - get_policy_with_sn1, get_policy_with_sn2, - det_sp1_resp, - det_sp2_sp3_resp, - deploy_sp1_resp, - deploy_sp2_sp3_resp, - get_dd_sn1_att_status, - get_dd_sn2_att_status, - get_dd_sn2_att_status, - delete_sp1_resp, delete_sp2_resp, 
delete_sp3_resp, - ] + get_snodes_resp, + get_policy_with_sn1, + get_policy_with_sn2, + det_sp1_resp, + det_sp2_sp3_resp, + deploy_sp1_resp, + deploy_sp2_sp3_resp, + get_dd_sn1_att_status, + get_dd_sn2_att_status, + get_dd_sn2_att_status, + delete_sp1_resp, + delete_sp2_resp, + delete_sp3_resp, + ] - if ('test_dcnm_sp_query_non_existing' == self._testMethodName): + if "test_dcnm_sp_query_non_existing" == self._testMethodName: - self.run_dcnm_send.side_effect = [[],[],[]] + self.run_dcnm_send.side_effect = [[], [], []] - if ('test_dcnm_sp_query_with_service_node1' == self._testMethodName): + if "test_dcnm_sp_query_with_service_node1" == self._testMethodName: - get_policy_with_sn1 = self.payloads_data.get('get_policy_with_sn1') + get_policy_with_sn1 = self.payloads_data.get("get_policy_with_sn1") self.run_dcnm_send.side_effect = [ - get_policy_with_sn1, - ] + get_policy_with_sn1, + ] - if ('test_dcnm_sp_query_with_service_node2' == self._testMethodName): + if "test_dcnm_sp_query_with_service_node2" == self._testMethodName: - get_policy_with_sn2 = self.payloads_data.get('get_policy_with_sn2') + get_policy_with_sn2 = self.payloads_data.get("get_policy_with_sn2") self.run_dcnm_send.side_effect = [ - get_policy_with_sn2, - ] + get_policy_with_sn2, + ] - if ('test_dcnm_sp_query_existing_with_node_and_policy' == self._testMethodName): + if "test_dcnm_sp_query_existing_with_node_and_policy" == self._testMethodName: - have_sp1_resp = self.payloads_data.get('get_sp1_resp') - have_sp2_resp = self.payloads_data.get('get_sp2_resp') - have_sp3_resp = self.payloads_data.get('get_sp3_resp') + have_sp1_resp = self.payloads_data.get("get_sp1_resp") + have_sp2_resp = self.payloads_data.get("get_sp2_resp") + have_sp3_resp = self.payloads_data.get("get_sp3_resp") self.run_dcnm_send.side_effect = [ - have_sp1_resp, - have_sp2_resp, - have_sp3_resp, - ] + have_sp1_resp, + have_sp2_resp, + have_sp3_resp, + ] - def load_fixtures(self, response=None, device=''): + def load_fixtures(self, 
response=None, device=""): self.run_dcnm_version_supported.side_effect = [11] # Load service policy related side-effects - self.load_sp_fixtures () + self.load_sp_fixtures() -#################################### FIXTURES END ############################ -#################################### TEST-CASES ############################## + # -------------------------- FIXTURES END -------------------------- + # -------------------------- TEST-CASES ---------------------------- - def test_dcnm_sp_merged_new (self): + def test_dcnm_sp_merged_new(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('create_sp1_sp3_config') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_sp1_sp3_config") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 3) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 3) + self.assertEqual(len(result["diff"][0]["merged"]), 3) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 3) - # Validate create 
and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_merged_new_no_opt_elems (self): + + def test_dcnm_sp_merged_new_no_opt_elems(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('create_sp1_sp2_no_opt_elems') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_sp1_sp2_no_opt_elems") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 2) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 2) + self.assertEqual(len(result["diff"][0]["merged"]), 2) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 2) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_merged_new_unauth_error (self): + + def test_dcnm_sp_merged_new_unauth_error(self): # load the json from playbooks - self.config_data = 
loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('create_sp1_sp2_no_opt_elems') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_sp1_sp2_no_opt_elems") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 2) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 2) + self.assertEqual(len(result["diff"][0]["merged"]), 2) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 2) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_merged_existing_no_opt_elems (self): + + def test_dcnm_sp_merged_existing_no_opt_elems(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config 
data - self.playbook_config = self.config_data.get('create_sp1_sp2_no_opt_elems') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_sp1_sp2_no_opt_elems") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 2) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 2) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 2) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 2) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_merged_new_check_mode (self): + + def test_dcnm_sp_merged_new_check_mode(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('create_sp1_sp3_config') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - _ansible_check_mode=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = 
self.config_data.get("create_sp1_sp3_config") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + _ansible_check_mode=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 3) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 3) + self.assertEqual(len(result["diff"][0]["merged"]), 3) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 3) - def test_dcnm_sp_config_without_state (self): + def test_dcnm_sp_config_without_state(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('create_sp1_sp3_config') - - set_module_args(dict(attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_sp1_sp3_config") + + set_module_args( + dict( + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 3) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - 
self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 3) + self.assertEqual(len(result["diff"][0]["merged"]), 3) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 3) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_merge_no_deploy (self): + + def test_dcnm_sp_merge_no_deploy(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('create_sp1_sp3_config') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_sp1_sp3_config") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 3) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 3) + self.assertEqual(len(result["diff"][0]["merged"]), 3) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + 
self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 3) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_merge_deploy_false (self): + + def test_dcnm_sp_merge_deploy_false(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('create_sp1_sp3_config') - - set_module_args(dict(state='merged', - attach=True, - deploy=False, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_sp1_sp3_config") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=False, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 3) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 3) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - + def test_dcnm_sp_wrong_state(self): # load the 
json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('create_sp1_sp7_config') - - set_module_args(dict(state='wrong_state', - attach=True, - deploy=False, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_sp1_sp7_config") + + set_module_args( + dict( + state="wrong_state", + attach=True, + deploy=False, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = None - try: + try: result = self.execute_module(changed=False, failed=False) - except: - self.assertEqual (result, None) + except Exception: + self.assertEqual(result, None) - def test_dcnm_sp_merge_no_mand_elems (self): + def test_dcnm_sp_merge_no_mand_elems(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('create_policy_no_mand_elems') + self.playbook_config = self.config_data.get("create_policy_no_mand_elems") - ## No dest_port + # No dest_port cfg = copy.deepcopy(self.playbook_config) cfg[0]["policy"].pop("dest_port") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg, + ) + ) result = None - try: + try: 
result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('dest_port : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("dest_port : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - ## No src_port + # No src_port cfg = copy.deepcopy(self.playbook_config) cfg[0]["policy"].pop("src_port") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg, + ) + ) result = None - try: + try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('src_port : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("src_port : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - ## No proto + # No proto cfg = copy.deepcopy(self.playbook_config) cfg[0]["policy"].pop("proto") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg, + ) + ) result = None - try: + try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('proto : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual(("proto : Required parameter not found" in (str(e))), True) + self.assertEqual(result, None) - - ## No next hop + # No next hop cfg = copy.deepcopy(self.playbook_config) cfg[0].pop("next_hop") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg)) 
+ set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg, + ) + ) result = None - try: + try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('next_hop : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("next_hop : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - ## No dest_network + # No dest_network cfg = copy.deepcopy(self.playbook_config) cfg[0].pop("dest_network") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg, + ) + ) result = None - try: + try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('dest_network : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("dest_network : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - ## No src_network + # No src_network cfg = copy.deepcopy(self.playbook_config) cfg[0].pop("src_network") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg, + ) + ) result = None - try: + try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('src_network : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("src_network : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - ## No dst_vrf + # No 
dst_vrf cfg = copy.deepcopy(self.playbook_config) cfg[0].pop("dest_vrf") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg, + ) + ) result = None - try: + try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('dest_vrf : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("dest_vrf : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - ## No src_vrf + # No src_vrf cfg = copy.deepcopy(self.playbook_config) cfg[0].pop("src_vrf") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg, + ) + ) result = None - try: + try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('src_vrf : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("src_vrf : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - ## No RP name + # No RP name cfg = copy.deepcopy(self.playbook_config) cfg[0].pop("rp_name") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg, + ) + ) result = None - try: + try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('rp_name : Required parameter not found' in (str(e))), True) - 
self.assertEqual (result, None) + self.assertEqual( + ("rp_name : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - ## No policy name + # No policy name cfg = copy.deepcopy(self.playbook_config) cfg[0].pop("name") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg, + ) + ) result = None - try: + try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('name : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual(("name : Required parameter not found" in (str(e))), True) + self.assertEqual(result, None) - ## No node name object + # No node name object cfg = copy.deepcopy(self.playbook_config) cfg[0].pop("node_name") - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg)) + set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg, + ) + ) result = None - try: + try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('node_name : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("node_name : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - def test_dcnm_sp_merged_existing_and_non_existing (self): + def test_dcnm_sp_merged_existing_and_non_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = 
loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('create_sp1_sp3_config') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_sp1_sp3_config") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 2) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 2) + self.assertEqual(len(result["diff"][0]["merged"]), 2) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 2) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_delete_existing_no_config (self): + + def test_dcnm_sp_delete_existing_no_config(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('delete_policies_no_config') - - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) 
+ self.playbook_config = self.config_data.get("delete_policies_no_config") + + set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 3) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 3) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_delete_existing_with_node_names (self): + + def test_dcnm_sp_delete_existing_with_node_names(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('delete_policies_with_node_names') - - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("delete_policies_with_node_names") + + set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = 
self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 3) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 3) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_delete_existing_with_node_name_and_policy_name (self): + + def test_dcnm_sp_delete_existing_with_node_name_and_policy_name(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('delete_policies_with_name_and_node_name') - - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "delete_policies_with_name_and_node_name" + ) + + set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 3) - self.assertEqual(len(result["diff"][0]["modified"]) , 
0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 3) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_delete_existing_with_node_name_and_rp_name (self): + + def test_dcnm_sp_delete_existing_with_node_name_and_rp_name(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('delete_policies_with_node_name_and_rp_name') - - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "delete_policies_with_node_name_and_rp_name" + ) + + set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 2) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + 
self.assertEqual(len(result["diff"][0]["deleted"]), 2) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_delete_existing_detach_unauth_err (self): + + def test_dcnm_sp_delete_existing_detach_unauth_err(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('delete_policies_with_name_and_node_name') - - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "delete_policies_with_name_and_node_name" + ) + + set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 3) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 3) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - # Validate create 
and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_delete_existing_delete_deploy_unauth_err (self): + + def test_dcnm_sp_delete_existing_delete_deploy_unauth_err(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('delete_policies_with_name_and_node_name') - - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "delete_policies_with_name_and_node_name" + ) + + set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 3) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 3) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_delete_existing_delete_unauth_err (self): + + def 
test_dcnm_sp_delete_existing_delete_unauth_err(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('delete_policies_with_name_and_node_name') - - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "delete_policies_with_name_and_node_name" + ) + + set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 3) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 3) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_delete_existing_and_non_existing (self): + + def test_dcnm_sp_delete_existing_and_non_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = 
loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('delete_policies_with_name_and_node_name') - - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "delete_policies_with_name_and_node_name" + ) + + set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 2) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 2) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_delete_non_existing (self): + + def test_dcnm_sp_delete_non_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('delete_policies_with_name_and_no_name') - - 
set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "delete_policies_with_name_and_no_name" + ) + + set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_delete_no_mand_elems (self): + + def test_dcnm_sp_delete_no_mand_elems(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('delete_policies_no_mand_elems') - - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("delete_policies_no_mand_elems") + + set_module_args( + dict( + state="deleted", + 
attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = None - try: + try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('node_name : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("node_name : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - def test_dcnm_sp_replace_sp1_to_sp3_non_existing (self): + def test_dcnm_sp_replace_sp1_to_sp3_non_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('replace_sp1_sp3_config') - - set_module_args(dict(state='replaced', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("replace_sp1_sp3_config") + + set_module_args( + dict( + state="replaced", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 3) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 3) + self.assertEqual(len(result["diff"][0]["merged"]), 3) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + 
self.assertEqual(len(result["diff"][0]["deploy"]), 3) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_replace_sp1_to_sp3_existing (self): + + def test_dcnm_sp_replace_sp1_to_sp3_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('replace_sp1_sp3_config') - - set_module_args(dict(state='replaced', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("replace_sp1_sp3_config") + + set_module_args( + dict( + state="replaced", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 3) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 3) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 3) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 3) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_replace_sp1_to_sp3_existing_no_change (self): + + def 
test_dcnm_sp_replace_sp1_to_sp3_existing_no_change(self): pass # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('create_sp1_sp3_config') - - set_module_args(dict(state='replaced', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_sp1_sp3_config") + + set_module_args( + dict( + state="replaced", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_override_with_new_peerings (self): + + def test_dcnm_sp_override_with_new_peerings(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = 
loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('override_policies_create_new') - - set_module_args(dict(state='overridden', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("override_policies_create_new") + + set_module_args( + dict( + state="overridden", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 1) - self.assertEqual(len(result["diff"][0]["deleted"]) , 2) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 1) + self.assertEqual(len(result["diff"][0]["merged"]), 1) + self.assertEqual(len(result["diff"][0]["deleted"]), 2) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 1) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_override_with_existing_peering (self): + + def test_dcnm_sp_override_with_existing_peering(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('override_policies_no_change') - - set_module_args(dict(state='overridden', - 
attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("override_policies_no_change") + + set_module_args( + dict( + state="overridden", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 2) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 2) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_override_with_existing_peering_updated (self): + + def test_dcnm_sp_override_with_existing_peering_updated(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('override_policies_modify_exist') - - set_module_args(dict(state='overridden', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("override_policies_modify_exist") + + set_module_args( + dict( + state="overridden", + attach=True, 
+ deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 2) - self.assertEqual(len(result["diff"][0]["modified"]) , 1) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 1) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 2) + self.assertEqual(len(result["diff"][0]["modified"]), 1) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 1) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_override_with_no_config (self): + + def test_dcnm_sp_override_with_no_config(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('override_policies_no_config') - - set_module_args(dict(state='overridden', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("override_policies_no_config") + + set_module_args( + dict( + state="overridden", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 3) - 
self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 3) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - # Validate create and deploy responses + # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_sp_query_existing_with_node_and_policy (self): + + def test_dcnm_sp_query_existing_with_node_and_policy(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('query_with_node_and_policy_name') - - set_module_args(dict(state='query', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("query_with_node_and_policy_name") + + set_module_args( + dict( + state="query", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 3) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) - self.assertEqual(len(result["response"]) , 3) + 
self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 3) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) + self.assertEqual(len(result["response"]), 3) - def test_dcnm_sp_query_non_existing (self): + def test_dcnm_sp_query_non_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('query_non_existing') - - set_module_args(dict(state='query', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("query_non_existing") + + set_module_args( + dict( + state="query", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 3) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) - self.assertEqual(len(result["response"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 3) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) + self.assertEqual(len(result["response"]), 0) - def test_dcnm_sp_query_with_service_node1 (self): + def 
test_dcnm_sp_query_with_service_node1(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('query_with_node_name_sn1') - - set_module_args(dict(state='query', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("query_with_node_name_sn1") + + set_module_args( + dict( + state="query", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 1) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 1) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - self.assertEqual(len(result["response"]) , 1) + self.assertEqual(len(result["response"]), 1) - def test_dcnm_sp_query_with_service_node2 (self): + def test_dcnm_sp_query_with_service_node2(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load 
required config data - self.playbook_config = self.config_data.get('query_with_node_name_sn1') - - set_module_args(dict(state='query', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("query_with_node_name_sn1") + + set_module_args( + dict( + state="query", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 1) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 1) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - self.assertEqual(len(result["response"]) , 2) + self.assertEqual(len(result["response"]), 2) - def test_dcnm_sp_query_no_mand_elems (self): + def test_dcnm_sp_query_no_mand_elems(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_service_policy_configs') - self.payloads_data = loadPlaybookData('dcnm_service_policy_payloads') + self.config_data = loadPlaybookData("dcnm_service_policy_configs") + self.payloads_data = loadPlaybookData("dcnm_service_policy_payloads") # load required config data - self.playbook_config = self.config_data.get('query_no_mand_elems') - - set_module_args(dict(state='query', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("query_no_mand_elems") + + set_module_args( + dict( + state="query", + attach=True, + 
deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = None - try: + try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('node_name : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("node_name : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) diff --git a/tests/unit/modules/dcnm/test_dcnm_service_route_peering.py b/tests/unit/modules/dcnm/test_dcnm_service_route_peering.py index 25f8a05b4..3b822e49b 100644 --- a/tests/unit/modules/dcnm/test_dcnm_service_route_peering.py +++ b/tests/unit/modules/dcnm/test_dcnm_service_route_peering.py @@ -1,6 +1,4 @@ -#!/usr/bin/python -# -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,7 +13,8 @@ # limitations under the License. 
# Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) +from __future__ import absolute_import, division, print_function + __metaclass__ = type from ansible_collections.ansible.netcommon.tests.unit.compat.mock import patch @@ -23,7 +22,9 @@ from ansible_collections.cisco.dcnm.plugins.modules import dcnm_service_route_peering from .dcnm_module import TestDcnmModule, set_module_args, loadPlaybookData -import json, copy +import json +import copy + class TestDcnmServiceRoutePeeringModule(TestDcnmModule): @@ -33,24 +34,30 @@ class TestDcnmServiceRoutePeeringModule(TestDcnmModule): def init_data(self): self.fd = None - def log_msg (self, msg): + def log_msg(self, msg): - if (self.fd is None): + if self.fd is None: self.fd = open("srp-ut.log", "w+") - self.fd.write (msg) + self.fd.write(msg) def setUp(self): super(TestDcnmServiceRoutePeeringModule, self).setUp() - self.mock_dcnm_send = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_route_peering.dcnm_send') - self.run_dcnm_send = self.mock_dcnm_send.start() + self.mock_dcnm_send = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_route_peering.dcnm_send" + ) + self.run_dcnm_send = self.mock_dcnm_send.start() - self.mock_dcnm_version_supported = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_route_peering.dcnm_version_supported') + self.mock_dcnm_version_supported = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_route_peering.dcnm_version_supported" + ) self.run_dcnm_version_supported = self.mock_dcnm_version_supported.start() - self.mock_dcnm_reset_connection = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_route_peering.dcnm_reset_connection') - self.run_dcnm_reset_connection = self.mock_dcnm_reset_connection.start() + self.mock_dcnm_reset_connection = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_service_route_peering.dcnm_reset_connection" + ) + 
self.run_dcnm_reset_connection = self.mock_dcnm_reset_connection.start() def tearDown(self): @@ -59,1144 +66,1528 @@ def tearDown(self): self.mock_dcnm_version_supported.stop() self.mock_dcnm_reset_connection.stop() -#################################### FIXTURES ############################ - - def load_srp_fixtures (self): - - if ('test_dcnm_srp_merged_new' == self._testMethodName): - - have_rp1_resp = [] - have_rp2_resp = [] - have_rp3_resp = [] - have_rp4_resp = [] - have_rp5_resp = [] - have_rp6_resp = [] - have_rp7_resp = [] - create_rp1_resp = self.payloads_data.get('create_rp1_resp') - create_rp2_resp = self.payloads_data.get('create_rp2_resp') - create_rp3_resp = self.payloads_data.get('create_rp3_resp') - create_rp4_resp = self.payloads_data.get('create_rp4_resp') - create_rp5_resp = self.payloads_data.get('create_rp5_resp') - create_rp6_resp = self.payloads_data.get('create_rp6_resp') - create_rp7_resp = self.payloads_data.get('create_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - att_rp1_status = self.payloads_data.get('attach_rp1_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp3_status = self.payloads_data.get('attach_rp3_resp') - att_rp4_status = self.payloads_data.get('attach_rp4_resp') - att_rp5_status = self.payloads_data.get('attach_rp5_resp') - att_rp6_status = self.payloads_data.get('attach_rp6_resp') - att_rp7_status = self.payloads_data.get('attach_rp7_resp') - - deploy_rp4_resp_unauth_err = self.payloads_data.get('deploy_rp4_resp_unauth_err') - - self.run_dcnm_send.side_effect = [have_rp1_resp, have_rp2_resp, have_rp3_resp, - have_rp4_resp, have_rp5_resp, have_rp6_resp, - have_rp7_resp, - create_rp1_resp, create_rp2_resp, create_rp3_resp, - create_rp4_resp, create_rp5_resp, create_rp6_resp, - create_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - att_rp1_status, att_rp2_status, att_rp3_status, - 
att_rp4_status, att_rp5_status, att_rp6_status, - att_rp7_status - ] - - if ('test_dcnm_srp_merged_new_no_opt_elems' == self._testMethodName): - - have_rp1_resp = [] - have_rp2_resp = [] - have_rp3_resp = [] - have_rp4_resp = [] - have_rp5_resp = [] - have_rp6_resp = [] - have_rp7_resp = [] - vlan_id_alloc_resp1 = self.payloads_data.get('vlan_id_alloc_resp_rp1') - vlan_id_alloc_resp2 = self.payloads_data.get('vlan_id_alloc_resp_rp2') - vlan_id_alloc_resp3 = self.payloads_data.get('vlan_id_alloc_resp_rp3') - vlan_id_alloc_resp4 = self.payloads_data.get('vlan_id_alloc_resp_rp4') - vlan_id_alloc_resp5 = self.payloads_data.get('vlan_id_alloc_resp_rp5') - vlan_id_alloc_resp6 = self.payloads_data.get('vlan_id_alloc_resp_rp6') - vlan_id_alloc_resp7 = self.payloads_data.get('vlan_id_alloc_resp_rp7') - create_rp1_resp = self.payloads_data.get('create_rp1_resp') - create_rp2_resp = self.payloads_data.get('create_rp2_resp') - create_rp3_resp = self.payloads_data.get('create_rp3_resp') - create_rp4_resp = self.payloads_data.get('create_rp4_resp') - create_rp5_resp = self.payloads_data.get('create_rp5_resp') - create_rp6_resp = self.payloads_data.get('create_rp6_resp') - create_rp7_resp = self.payloads_data.get('create_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - att_rp1_status = self.payloads_data.get('attach_rp1_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp3_status = self.payloads_data.get('attach_rp3_resp') - att_rp4_status = self.payloads_data.get('attach_rp4_resp') - att_rp5_status = self.payloads_data.get('attach_rp5_resp') - att_rp6_status = self.payloads_data.get('attach_rp6_resp') - att_rp7_status = self.payloads_data.get('attach_rp7_resp') - - self.run_dcnm_send.side_effect = [have_rp1_resp, have_rp2_resp, have_rp3_resp, - have_rp4_resp, have_rp5_resp, have_rp6_resp, - have_rp7_resp, - vlan_id_alloc_resp1, - create_rp1_resp, - 
vlan_id_alloc_resp2, - create_rp2_resp, - vlan_id_alloc_resp3, - create_rp3_resp, - vlan_id_alloc_resp4, - create_rp4_resp, - vlan_id_alloc_resp5, - create_rp5_resp, - vlan_id_alloc_resp6, - create_rp6_resp, - vlan_id_alloc_resp7, - create_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - att_rp1_status, att_rp2_status, att_rp3_status, - att_rp4_status, att_rp5_status, att_rp6_status, - att_rp7_status - ] - - if ('test_dcnm_srp_merged_existing_no_opt_elems' == self._testMethodName): - - have_rp1_resp = self.payloads_data.get('have_rp1_resp') - have_rp2_resp = self.payloads_data.get('have_rp2_resp') - have_rp3_resp = self.payloads_data.get('have_rp3_resp') - have_rp4_resp = self.payloads_data.get('have_rp4_resp') - have_rp5_resp = self.payloads_data.get('have_rp5_resp') - have_rp6_resp = self.payloads_data.get('have_rp6_resp') - have_rp7_resp = self.payloads_data.get('have_rp7_resp') - att_rp1_status = self.payloads_data.get('attach_rp1_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp3_status = self.payloads_data.get('attach_rp3_resp') - att_rp4_status = self.payloads_data.get('attach_rp4_resp') - att_rp5_status = self.payloads_data.get('attach_rp5_resp') - att_rp6_status = self.payloads_data.get('attach_rp6_resp') - att_rp7_status = self.payloads_data.get('attach_rp7_resp') - create_rp1_resp = self.payloads_data.get('create_rp1_resp') - create_rp2_resp = self.payloads_data.get('create_rp2_resp') - create_rp3_resp = self.payloads_data.get('create_rp3_resp') - create_rp4_resp = self.payloads_data.get('create_rp4_resp') - create_rp5_resp = self.payloads_data.get('create_rp5_resp') - create_rp6_resp = self.payloads_data.get('create_rp6_resp') - create_rp7_resp = self.payloads_data.get('create_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - - self.run_dcnm_send.side_effect = [have_rp1_resp, have_rp2_resp, have_rp3_resp, - 
have_rp4_resp, have_rp5_resp, have_rp6_resp, - have_rp7_resp, - att_rp1_status, att_rp2_status, att_rp3_status, - att_rp4_status, att_rp5_status, att_rp6_status, - att_rp7_status, - create_rp1_resp, create_rp2_resp, create_rp3_resp, - create_rp4_resp, create_rp5_resp, create_rp6_resp, - create_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - att_rp1_status, att_rp2_status, att_rp3_status, - att_rp4_status, att_rp5_status, att_rp6_status, - att_rp7_status - ] - - if ('test_dcnm_srp_merged_new_check_mode' == self._testMethodName): - - have_rp1_resp = [] - have_rp2_resp = [] - have_rp3_resp = [] - have_rp4_resp = [] - have_rp5_resp = [] - have_rp6_resp = [] - have_rp7_resp = [] - - self.run_dcnm_send.side_effect = [have_rp1_resp, have_rp2_resp, have_rp3_resp, - have_rp4_resp, have_rp5_resp, have_rp6_resp, - have_rp7_resp] - - - if ('test_dcnm_srp_merged_new_invalid_request_error' == self._testMethodName): - - have_rp1_resp = [] - have_rp2_resp = [] - have_rp3_resp = [] - have_rp4_resp = [] - have_rp5_resp = [] - have_rp6_resp = [] - have_rp7_resp = [] - create_rp1_resp = self.payloads_data.get('create_rp1_resp') - create_rp2_resp = self.payloads_data.get('create_rp2_resp') - create_rp3_resp = self.payloads_data.get('create_rp3_resp') - create_rp4_resp = self.payloads_data.get('create_rp4_resp') - create_rp5_resp = self.payloads_data.get('create_rp5_resp') - create_rp6_resp = self.payloads_data.get('create_rp6_resp') - create_rp7_resp = self.payloads_data.get('create_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - att_rp1_status = self.payloads_data.get('attach_rp1_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp3_status = self.payloads_data.get('attach_rp3_resp') - att_rp4_status = self.payloads_data.get('attach_rp4_resp') - att_rp5_status = self.payloads_data.get('attach_rp5_resp') - att_rp6_status = 
self.payloads_data.get('attach_rp6_resp') - att_rp7_status = self.payloads_data.get('attach_rp7_resp') - - create_rp7_resp_inv_req_err = self.payloads_data.get('create_rp7_resp_inv_req_err') - - self.run_dcnm_send.side_effect = [have_rp1_resp, have_rp2_resp, have_rp3_resp, - have_rp4_resp, have_rp5_resp, have_rp6_resp, - have_rp7_resp, - create_rp1_resp, create_rp2_resp, create_rp3_resp, - create_rp4_resp, create_rp5_resp, create_rp6_resp, - create_rp7_resp_inv_req_err, have_rp7_resp, - create_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - att_rp1_status, att_rp2_status, att_rp3_status, - att_rp4_status, att_rp5_status, att_rp6_status, - att_rp7_status - ] - - if ('test_dcnm_srp_merged_new_invalid_fabric_error' == self._testMethodName): - - have_rp1_resp = [] - have_rp2_resp = [] - have_rp3_resp = [] - have_rp4_resp = [] - have_rp5_resp = [] - have_rp6_resp = [] - have_rp7_resp = [] - create_rp1_resp = self.payloads_data.get('create_rp1_resp') - create_rp2_resp = self.payloads_data.get('create_rp2_resp') - create_rp3_resp = self.payloads_data.get('create_rp3_resp') - create_rp4_resp = self.payloads_data.get('create_rp4_resp') - create_rp5_resp = self.payloads_data.get('create_rp5_resp') - create_rp6_resp = self.payloads_data.get('create_rp6_resp') - create_rp7_resp = self.payloads_data.get('create_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - att_rp1_status = self.payloads_data.get('attach_rp1_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp3_status = self.payloads_data.get('attach_rp3_resp') - att_rp4_status = self.payloads_data.get('attach_rp4_resp') - att_rp5_status = self.payloads_data.get('attach_rp5_resp') - att_rp6_status = self.payloads_data.get('attach_rp6_resp') - att_rp7_status = self.payloads_data.get('attach_rp7_resp') - - create_rp7_resp_inv_fab_err = self.payloads_data.get('create_rp7_resp_inv_fab_err') - - 
self.run_dcnm_send.side_effect = [have_rp1_resp, have_rp2_resp, have_rp3_resp, - have_rp4_resp, have_rp5_resp, have_rp6_resp, - have_rp7_resp, - create_rp1_resp, create_rp2_resp, create_rp3_resp, - create_rp4_resp, create_rp5_resp, create_rp6_resp, - create_rp7_resp_inv_fab_err, have_rp7_resp, - create_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - att_rp1_status, att_rp2_status, att_rp3_status, - att_rp4_status, att_rp5_status, att_rp6_status, - att_rp7_status - ] - - if ('test_dcnm_srp_merged_new_unauth_error' == self._testMethodName): - - have_rp1_resp = [] - have_rp2_resp = [] - have_rp3_resp = [] - have_rp4_resp = [] - have_rp5_resp = [] - have_rp6_resp = [] - have_rp7_resp = [] - create_rp1_resp = self.payloads_data.get('create_rp1_resp') - create_rp2_resp = self.payloads_data.get('create_rp2_resp') - create_rp3_resp = self.payloads_data.get('create_rp3_resp') - create_rp4_resp = self.payloads_data.get('create_rp4_resp') - create_rp5_resp = self.payloads_data.get('create_rp5_resp') - create_rp6_resp = self.payloads_data.get('create_rp6_resp') - create_rp7_resp = self.payloads_data.get('create_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - att_rp1_status = self.payloads_data.get('attach_rp1_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp3_status = self.payloads_data.get('attach_rp3_resp') - att_rp4_status = self.payloads_data.get('attach_rp4_resp') - att_rp5_status = self.payloads_data.get('attach_rp5_resp') - att_rp6_status = self.payloads_data.get('attach_rp6_resp') - att_rp7_status = self.payloads_data.get('attach_rp7_resp') - - create_rp7_resp_unauth_err = self.payloads_data.get('create_rp7_resp_unauth_err') - - self.run_dcnm_send.side_effect = [have_rp1_resp, have_rp2_resp, have_rp3_resp, - have_rp4_resp, have_rp5_resp, have_rp6_resp, - have_rp7_resp, - create_rp1_resp, create_rp2_resp, create_rp3_resp, - 
create_rp4_resp, create_rp5_resp, create_rp6_resp, - create_rp7_resp_unauth_err, have_rp7_resp, create_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - att_rp1_status, att_rp2_status, att_rp3_status, - att_rp4_status, att_rp5_status, att_rp6_status, - att_rp7_status - ] - - if ('test_dcnm_srp_config_without_state' == self._testMethodName): - - have_rp1_resp = [] - have_rp2_resp = [] - have_rp3_resp = [] - have_rp4_resp = [] - have_rp5_resp = [] - have_rp6_resp = [] - have_rp7_resp = [] - create_rp1_resp = self.payloads_data.get('create_rp1_resp') - create_rp2_resp = self.payloads_data.get('create_rp2_resp') - create_rp3_resp = self.payloads_data.get('create_rp3_resp') - create_rp4_resp = self.payloads_data.get('create_rp4_resp') - create_rp5_resp = self.payloads_data.get('create_rp5_resp') - create_rp6_resp = self.payloads_data.get('create_rp6_resp') - create_rp7_resp = self.payloads_data.get('create_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - att_rp1_status = self.payloads_data.get('attach_rp1_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp3_status = self.payloads_data.get('attach_rp3_resp') - att_rp4_status = self.payloads_data.get('attach_rp4_resp') - att_rp5_status = self.payloads_data.get('attach_rp5_resp') - att_rp6_status = self.payloads_data.get('attach_rp6_resp') - att_rp7_status = self.payloads_data.get('attach_rp7_resp') - - self.run_dcnm_send.side_effect = [have_rp1_resp, have_rp2_resp, have_rp3_resp, - have_rp4_resp, have_rp5_resp, have_rp6_resp, - have_rp7_resp, - create_rp1_resp, create_rp2_resp, create_rp3_resp, - create_rp4_resp, create_rp5_resp, create_rp6_resp, - create_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - att_rp1_status, att_rp2_status, att_rp3_status, - att_rp4_status, att_rp5_status, att_rp6_status, - att_rp7_status - ] - - if ('test_dcnm_srp_merge_no_deploy' == 
self._testMethodName): - - have_rp1_resp = [] - have_rp2_resp = [] - have_rp3_resp = [] - have_rp4_resp = [] - have_rp5_resp = [] - have_rp6_resp = [] - have_rp7_resp = [] - create_rp1_resp = self.payloads_data.get('create_rp1_resp') - create_rp2_resp = self.payloads_data.get('create_rp2_resp') - create_rp3_resp = self.payloads_data.get('create_rp3_resp') - create_rp4_resp = self.payloads_data.get('create_rp4_resp') - create_rp5_resp = self.payloads_data.get('create_rp5_resp') - create_rp6_resp = self.payloads_data.get('create_rp6_resp') - create_rp7_resp = self.payloads_data.get('create_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - att_rp1_status = self.payloads_data.get('attach_rp1_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp3_status = self.payloads_data.get('attach_rp3_resp') - att_rp4_status = self.payloads_data.get('attach_rp4_resp') - att_rp5_status = self.payloads_data.get('attach_rp5_resp') - att_rp6_status = self.payloads_data.get('attach_rp6_resp') - att_rp7_status = self.payloads_data.get('attach_rp7_resp') - - self.run_dcnm_send.side_effect = [have_rp1_resp, have_rp2_resp, have_rp3_resp, - have_rp4_resp, have_rp5_resp, have_rp6_resp, - have_rp7_resp, - create_rp1_resp, create_rp2_resp, create_rp3_resp, - create_rp4_resp, create_rp5_resp, create_rp6_resp, - create_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - att_rp1_status, att_rp2_status, att_rp3_status, - att_rp4_status, att_rp5_status, att_rp6_status, - att_rp7_status - ] - - if ('test_dcnm_srp_merge_deploy_false' == self._testMethodName): - - have_rp1_resp = [] - have_rp2_resp = [] - have_rp3_resp = [] - have_rp4_resp = [] - have_rp5_resp = [] - have_rp6_resp = [] - have_rp7_resp = [] - create_rp1_resp = self.payloads_data.get('create_rp1_resp') - create_rp2_resp = self.payloads_data.get('create_rp2_resp') - create_rp3_resp = 
self.payloads_data.get('create_rp3_resp') - create_rp4_resp = self.payloads_data.get('create_rp4_resp') - create_rp5_resp = self.payloads_data.get('create_rp5_resp') - create_rp6_resp = self.payloads_data.get('create_rp6_resp') - create_rp7_resp = self.payloads_data.get('create_rp7_resp') - - self.run_dcnm_send.side_effect = [have_rp1_resp, have_rp2_resp, have_rp3_resp, - have_rp4_resp, have_rp5_resp, have_rp6_resp, - have_rp7_resp, - create_rp1_resp, create_rp2_resp, create_rp3_resp, - create_rp4_resp, create_rp5_resp, create_rp6_resp, - create_rp7_resp] - - - if ('test_dcnm_srp_merged_existing' == self._testMethodName): - - have_rp1_resp = self.payloads_data.get('have_rp1_resp') - have_rp2_resp = self.payloads_data.get('have_rp2_resp') - have_rp3_resp = self.payloads_data.get('have_rp3_resp') - have_rp4_resp = self.payloads_data.get('have_rp4_resp') - have_rp5_resp = self.payloads_data.get('have_rp5_resp') - have_rp6_resp = self.payloads_data.get('have_rp6_resp') - have_rp7_resp = self.payloads_data.get('have_rp7_resp') - att_rp1_status = self.payloads_data.get('attach_rp1_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp3_status = self.payloads_data.get('attach_rp3_resp') - att_rp4_status = self.payloads_data.get('attach_rp4_resp') - att_rp5_status = self.payloads_data.get('attach_rp5_resp') - att_rp6_status = self.payloads_data.get('attach_rp6_resp') - att_rp7_status = self.payloads_data.get('attach_rp7_resp') - - self.run_dcnm_send.side_effect = [have_rp1_resp, have_rp2_resp, have_rp3_resp, - have_rp4_resp, have_rp5_resp, have_rp6_resp, - have_rp7_resp, - att_rp1_status, att_rp2_status, att_rp3_status, - att_rp4_status, att_rp5_status, att_rp6_status, - att_rp7_status] - - if ('test_dcnm_srp_merged_existing_and_non_existing' == self._testMethodName): - - have_rp1_resp = self.payloads_data.get('have_rp1_resp') - have_rp3_resp = self.payloads_data.get('have_rp3_resp') - have_rp5_resp = self.payloads_data.get('have_rp5_resp') - 
att_rp1_status = self.payloads_data.get('attach_rp1_resp') - att_rp3_status = self.payloads_data.get('attach_rp3_resp') - att_rp5_status = self.payloads_data.get('attach_rp5_resp') - create_rp2_resp = self.payloads_data.get('create_rp2_resp') - create_rp4_resp = self.payloads_data.get('create_rp4_resp') - create_rp6_resp = self.payloads_data.get('create_rp6_resp') - create_rp7_resp = self.payloads_data.get('create_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp4_status = self.payloads_data.get('attach_rp4_resp') - att_rp6_status = self.payloads_data.get('attach_rp6_resp') - att_rp7_status = self.payloads_data.get('attach_rp7_resp') - - self.run_dcnm_send.side_effect = [have_rp1_resp, [], have_rp3_resp, - [], have_rp5_resp, [], [], - att_rp1_status, att_rp3_status, - att_rp5_status, - create_rp2_resp, create_rp4_resp, create_rp6_resp, - create_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - att_rp2_status, - att_rp4_status, att_rp6_status, - att_rp7_status - ] - - if ('test_dcnm_srp_merged_update_existing' == self._testMethodName): - - have_rp2_resp = self.payloads_data.get('have_rp2_resp') - have_rp4_resp = self.payloads_data.get('have_rp4_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp4_status = self.payloads_data.get('attach_rp4_resp') - create_rp2_resp = self.payloads_data.get('create_rp2_resp') - create_rp4_resp = self.payloads_data.get('create_rp4_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp4_status = self.payloads_data.get('attach_rp4_resp') - - self.run_dcnm_send.side_effect = [have_rp2_resp, have_rp4_resp, - att_rp2_status, att_rp4_status, - create_rp2_resp, att_rp2_status, - 
create_rp4_resp, att_rp4_status, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - att_rp2_status, att_rp4_status, - ] - - if ('test_dcnm_srp_merged_update_existing_unauth_err' == self._testMethodName): - - have_rp2_resp = self.payloads_data.get('have_rp2_resp') - have_rp4_resp = self.payloads_data.get('have_rp4_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp4_status = self.payloads_data.get('attach_rp4_resp') - create_rp2_resp = self.payloads_data.get('create_rp2_resp') - create_rp4_resp = self.payloads_data.get('create_rp4_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp4_status = self.payloads_data.get('attach_rp4_resp') - - create_rp4_resp_unauth_err = self.payloads_data.get('create_rp4_resp_unauth_err') - - self.run_dcnm_send.side_effect = [have_rp2_resp, have_rp4_resp, - att_rp2_status, att_rp4_status, - create_rp2_resp, att_rp2_status, - create_rp4_resp_unauth_err, - create_rp4_resp, att_rp4_status, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - att_rp2_status, att_rp4_status, - ] - - if ('test_dcnm_srp_delete_existing' == self._testMethodName): - - - have_rp1_resp = self.payloads_data.get('have_rp1_resp') - have_rp2_resp = self.payloads_data.get('have_rp2_resp') - have_rp3_resp = self.payloads_data.get('have_rp3_resp') - have_rp4_resp = self.payloads_data.get('have_rp4_resp') - have_rp5_resp = self.payloads_data.get('have_rp5_resp') - have_rp6_resp = self.payloads_data.get('have_rp6_resp') - have_rp7_resp = self.payloads_data.get('have_rp7_resp') - det_rp1_rp3_resp = self.payloads_data.get('detach_rp1_rp3_resp') - det_rp4_rp7_resp = self.payloads_data.get('detach_rp4_rp7_resp') - delete_rp1_resp = self.payloads_data.get('delete_rp1_resp') - delete_rp2_resp = self.payloads_data.get('delete_rp2_resp') - delete_rp3_resp = self.payloads_data.get('delete_rp3_resp') - 
delete_rp4_resp = self.payloads_data.get('delete_rp4_resp') - delete_rp5_resp = self.payloads_data.get('delete_rp5_resp') - delete_rp6_resp = self.payloads_data.get('delete_rp6_resp') - delete_rp7_resp = self.payloads_data.get('delete_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - dd_rp1_status = self.payloads_data.get('del_deploy_rp1_resp') - dd_rp2_status = self.payloads_data.get('del_deploy_rp2_resp') - dd_rp3_status = self.payloads_data.get('del_deploy_rp3_resp') - dd_rp4_status = self.payloads_data.get('del_deploy_rp4_resp') - dd_rp5_status = self.payloads_data.get('del_deploy_rp5_resp') - dd_rp6_status = self.payloads_data.get('del_deploy_rp6_resp') - dd_rp7_status = self.payloads_data.get('del_deploy_rp7_resp') - - - self.run_dcnm_send.side_effect = [have_rp1_resp, have_rp2_resp, have_rp3_resp, - have_rp4_resp, have_rp5_resp, have_rp6_resp, - have_rp7_resp, - det_rp1_rp3_resp, det_rp4_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - dd_rp1_status, dd_rp2_status, dd_rp3_status, - dd_rp4_status, dd_rp5_status, dd_rp6_status, - dd_rp7_status, - delete_rp1_resp, delete_rp2_resp, delete_rp3_resp, - delete_rp4_resp, delete_rp5_resp, delete_rp6_resp, - delete_rp7_resp] - - if ('test_dcnm_srp_delete_existing_no_config' == self._testMethodName): - - - serv_nodes_resp = self.payloads_data.get('serv_nodes_resp') - have_it_sn1_resp = self.payloads_data.get('have_it_sn1_resp') - have_it_sn2_resp = self.payloads_data.get('have_it_sn2_resp') - det_rp1_rp3_resp = self.payloads_data.get('detach_rp1_rp3_resp') - det_rp4_rp7_resp = self.payloads_data.get('detach_rp4_rp7_resp') - delete_rp1_resp = self.payloads_data.get('delete_rp1_resp') - delete_rp2_resp = self.payloads_data.get('delete_rp2_resp') - delete_rp3_resp = self.payloads_data.get('delete_rp3_resp') - delete_rp4_resp = self.payloads_data.get('delete_rp4_resp') - delete_rp5_resp = 
self.payloads_data.get('delete_rp5_resp') - delete_rp6_resp = self.payloads_data.get('delete_rp6_resp') - delete_rp7_resp = self.payloads_data.get('delete_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - dd_rp1_status = self.payloads_data.get('del_deploy_rp1_resp') - dd_rp2_status = self.payloads_data.get('del_deploy_rp2_resp') - dd_rp3_status = self.payloads_data.get('del_deploy_rp3_resp') - dd_rp4_status = self.payloads_data.get('del_deploy_rp4_resp') - dd_rp5_status = self.payloads_data.get('del_deploy_rp5_resp') - dd_rp6_status = self.payloads_data.get('del_deploy_rp6_resp') - dd_rp7_status = self.payloads_data.get('del_deploy_rp7_resp') + # -------------------------- FIXTURES -------------------------- + + def load_srp_fixtures(self): + + if "test_dcnm_srp_merged_new" == self._testMethodName: + + have_rp1_resp = [] + have_rp2_resp = [] + have_rp3_resp = [] + have_rp4_resp = [] + have_rp5_resp = [] + have_rp6_resp = [] + have_rp7_resp = [] + create_rp1_resp = self.payloads_data.get("create_rp1_resp") + create_rp2_resp = self.payloads_data.get("create_rp2_resp") + create_rp3_resp = self.payloads_data.get("create_rp3_resp") + create_rp4_resp = self.payloads_data.get("create_rp4_resp") + create_rp5_resp = self.payloads_data.get("create_rp5_resp") + create_rp6_resp = self.payloads_data.get("create_rp6_resp") + create_rp7_resp = self.payloads_data.get("create_rp7_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + att_rp1_status = self.payloads_data.get("attach_rp1_resp") + att_rp2_status = self.payloads_data.get("attach_rp2_resp") + att_rp3_status = self.payloads_data.get("attach_rp3_resp") + att_rp4_status = self.payloads_data.get("attach_rp4_resp") + att_rp5_status = self.payloads_data.get("attach_rp5_resp") + att_rp6_status = 
self.payloads_data.get("attach_rp6_resp") + att_rp7_status = self.payloads_data.get("attach_rp7_resp") + + deploy_rp4_resp_unauth_err = self.payloads_data.get( + "deploy_rp4_resp_unauth_err" + ) + + self.run_dcnm_send.side_effect = [ + have_rp1_resp, + have_rp2_resp, + have_rp3_resp, + have_rp4_resp, + have_rp5_resp, + have_rp6_resp, + have_rp7_resp, + create_rp1_resp, + create_rp2_resp, + create_rp3_resp, + create_rp4_resp, + create_rp5_resp, + create_rp6_resp, + create_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + att_rp1_status, + att_rp2_status, + att_rp3_status, + att_rp4_status, + att_rp5_status, + att_rp6_status, + att_rp7_status, + ] + + if "test_dcnm_srp_merged_new_no_opt_elems" == self._testMethodName: + + have_rp1_resp = [] + have_rp2_resp = [] + have_rp3_resp = [] + have_rp4_resp = [] + have_rp5_resp = [] + have_rp6_resp = [] + have_rp7_resp = [] + vlan_id_alloc_resp1 = self.payloads_data.get("vlan_id_alloc_resp_rp1") + vlan_id_alloc_resp2 = self.payloads_data.get("vlan_id_alloc_resp_rp2") + vlan_id_alloc_resp3 = self.payloads_data.get("vlan_id_alloc_resp_rp3") + vlan_id_alloc_resp4 = self.payloads_data.get("vlan_id_alloc_resp_rp4") + vlan_id_alloc_resp5 = self.payloads_data.get("vlan_id_alloc_resp_rp5") + vlan_id_alloc_resp6 = self.payloads_data.get("vlan_id_alloc_resp_rp6") + vlan_id_alloc_resp7 = self.payloads_data.get("vlan_id_alloc_resp_rp7") + create_rp1_resp = self.payloads_data.get("create_rp1_resp") + create_rp2_resp = self.payloads_data.get("create_rp2_resp") + create_rp3_resp = self.payloads_data.get("create_rp3_resp") + create_rp4_resp = self.payloads_data.get("create_rp4_resp") + create_rp5_resp = self.payloads_data.get("create_rp5_resp") + create_rp6_resp = self.payloads_data.get("create_rp6_resp") + create_rp7_resp = self.payloads_data.get("create_rp7_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + att_rp1_status = 
self.payloads_data.get("attach_rp1_resp") + att_rp2_status = self.payloads_data.get("attach_rp2_resp") + att_rp3_status = self.payloads_data.get("attach_rp3_resp") + att_rp4_status = self.payloads_data.get("attach_rp4_resp") + att_rp5_status = self.payloads_data.get("attach_rp5_resp") + att_rp6_status = self.payloads_data.get("attach_rp6_resp") + att_rp7_status = self.payloads_data.get("attach_rp7_resp") + + self.run_dcnm_send.side_effect = [ + have_rp1_resp, + have_rp2_resp, + have_rp3_resp, + have_rp4_resp, + have_rp5_resp, + have_rp6_resp, + have_rp7_resp, + vlan_id_alloc_resp1, + create_rp1_resp, + vlan_id_alloc_resp2, + create_rp2_resp, + vlan_id_alloc_resp3, + create_rp3_resp, + vlan_id_alloc_resp4, + create_rp4_resp, + vlan_id_alloc_resp5, + create_rp5_resp, + vlan_id_alloc_resp6, + create_rp6_resp, + vlan_id_alloc_resp7, + create_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + att_rp1_status, + att_rp2_status, + att_rp3_status, + att_rp4_status, + att_rp5_status, + att_rp6_status, + att_rp7_status, + ] + + if "test_dcnm_srp_merged_existing_no_opt_elems" == self._testMethodName: + + have_rp1_resp = self.payloads_data.get("have_rp1_resp") + have_rp2_resp = self.payloads_data.get("have_rp2_resp") + have_rp3_resp = self.payloads_data.get("have_rp3_resp") + have_rp4_resp = self.payloads_data.get("have_rp4_resp") + have_rp5_resp = self.payloads_data.get("have_rp5_resp") + have_rp6_resp = self.payloads_data.get("have_rp6_resp") + have_rp7_resp = self.payloads_data.get("have_rp7_resp") + att_rp1_status = self.payloads_data.get("attach_rp1_resp") + att_rp2_status = self.payloads_data.get("attach_rp2_resp") + att_rp3_status = self.payloads_data.get("attach_rp3_resp") + att_rp4_status = self.payloads_data.get("attach_rp4_resp") + att_rp5_status = self.payloads_data.get("attach_rp5_resp") + att_rp6_status = self.payloads_data.get("attach_rp6_resp") + att_rp7_status = self.payloads_data.get("attach_rp7_resp") + create_rp1_resp = 
self.payloads_data.get("create_rp1_resp") + create_rp2_resp = self.payloads_data.get("create_rp2_resp") + create_rp3_resp = self.payloads_data.get("create_rp3_resp") + create_rp4_resp = self.payloads_data.get("create_rp4_resp") + create_rp5_resp = self.payloads_data.get("create_rp5_resp") + create_rp6_resp = self.payloads_data.get("create_rp6_resp") + create_rp7_resp = self.payloads_data.get("create_rp7_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + + self.run_dcnm_send.side_effect = [ + have_rp1_resp, + have_rp2_resp, + have_rp3_resp, + have_rp4_resp, + have_rp5_resp, + have_rp6_resp, + have_rp7_resp, + att_rp1_status, + att_rp2_status, + att_rp3_status, + att_rp4_status, + att_rp5_status, + att_rp6_status, + att_rp7_status, + create_rp1_resp, + create_rp2_resp, + create_rp3_resp, + create_rp4_resp, + create_rp5_resp, + create_rp6_resp, + create_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + att_rp1_status, + att_rp2_status, + att_rp3_status, + att_rp4_status, + att_rp5_status, + att_rp6_status, + att_rp7_status, + ] + + if "test_dcnm_srp_merged_new_check_mode" == self._testMethodName: + + have_rp1_resp = [] + have_rp2_resp = [] + have_rp3_resp = [] + have_rp4_resp = [] + have_rp5_resp = [] + have_rp6_resp = [] + have_rp7_resp = [] + + self.run_dcnm_send.side_effect = [ + have_rp1_resp, + have_rp2_resp, + have_rp3_resp, + have_rp4_resp, + have_rp5_resp, + have_rp6_resp, + have_rp7_resp, + ] + + if "test_dcnm_srp_merged_new_invalid_request_error" == self._testMethodName: + + have_rp1_resp = [] + have_rp2_resp = [] + have_rp3_resp = [] + have_rp4_resp = [] + have_rp5_resp = [] + have_rp6_resp = [] + have_rp7_resp = [] + create_rp1_resp = self.payloads_data.get("create_rp1_resp") + create_rp2_resp = self.payloads_data.get("create_rp2_resp") + create_rp3_resp = self.payloads_data.get("create_rp3_resp") + create_rp4_resp = 
self.payloads_data.get("create_rp4_resp") + create_rp5_resp = self.payloads_data.get("create_rp5_resp") + create_rp6_resp = self.payloads_data.get("create_rp6_resp") + create_rp7_resp = self.payloads_data.get("create_rp7_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + att_rp1_status = self.payloads_data.get("attach_rp1_resp") + att_rp2_status = self.payloads_data.get("attach_rp2_resp") + att_rp3_status = self.payloads_data.get("attach_rp3_resp") + att_rp4_status = self.payloads_data.get("attach_rp4_resp") + att_rp5_status = self.payloads_data.get("attach_rp5_resp") + att_rp6_status = self.payloads_data.get("attach_rp6_resp") + att_rp7_status = self.payloads_data.get("attach_rp7_resp") + + create_rp7_resp_inv_req_err = self.payloads_data.get( + "create_rp7_resp_inv_req_err" + ) + + self.run_dcnm_send.side_effect = [ + have_rp1_resp, + have_rp2_resp, + have_rp3_resp, + have_rp4_resp, + have_rp5_resp, + have_rp6_resp, + have_rp7_resp, + create_rp1_resp, + create_rp2_resp, + create_rp3_resp, + create_rp4_resp, + create_rp5_resp, + create_rp6_resp, + create_rp7_resp_inv_req_err, + have_rp7_resp, + create_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + att_rp1_status, + att_rp2_status, + att_rp3_status, + att_rp4_status, + att_rp5_status, + att_rp6_status, + att_rp7_status, + ] + + if "test_dcnm_srp_merged_new_invalid_fabric_error" == self._testMethodName: + + have_rp1_resp = [] + have_rp2_resp = [] + have_rp3_resp = [] + have_rp4_resp = [] + have_rp5_resp = [] + have_rp6_resp = [] + have_rp7_resp = [] + create_rp1_resp = self.payloads_data.get("create_rp1_resp") + create_rp2_resp = self.payloads_data.get("create_rp2_resp") + create_rp3_resp = self.payloads_data.get("create_rp3_resp") + create_rp4_resp = self.payloads_data.get("create_rp4_resp") + create_rp5_resp = self.payloads_data.get("create_rp5_resp") + create_rp6_resp = 
self.payloads_data.get("create_rp6_resp") + create_rp7_resp = self.payloads_data.get("create_rp7_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + att_rp1_status = self.payloads_data.get("attach_rp1_resp") + att_rp2_status = self.payloads_data.get("attach_rp2_resp") + att_rp3_status = self.payloads_data.get("attach_rp3_resp") + att_rp4_status = self.payloads_data.get("attach_rp4_resp") + att_rp5_status = self.payloads_data.get("attach_rp5_resp") + att_rp6_status = self.payloads_data.get("attach_rp6_resp") + att_rp7_status = self.payloads_data.get("attach_rp7_resp") + + create_rp7_resp_inv_fab_err = self.payloads_data.get( + "create_rp7_resp_inv_fab_err" + ) + + self.run_dcnm_send.side_effect = [ + have_rp1_resp, + have_rp2_resp, + have_rp3_resp, + have_rp4_resp, + have_rp5_resp, + have_rp6_resp, + have_rp7_resp, + create_rp1_resp, + create_rp2_resp, + create_rp3_resp, + create_rp4_resp, + create_rp5_resp, + create_rp6_resp, + create_rp7_resp_inv_fab_err, + have_rp7_resp, + create_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + att_rp1_status, + att_rp2_status, + att_rp3_status, + att_rp4_status, + att_rp5_status, + att_rp6_status, + att_rp7_status, + ] + + if "test_dcnm_srp_merged_new_unauth_error" == self._testMethodName: + + have_rp1_resp = [] + have_rp2_resp = [] + have_rp3_resp = [] + have_rp4_resp = [] + have_rp5_resp = [] + have_rp6_resp = [] + have_rp7_resp = [] + create_rp1_resp = self.payloads_data.get("create_rp1_resp") + create_rp2_resp = self.payloads_data.get("create_rp2_resp") + create_rp3_resp = self.payloads_data.get("create_rp3_resp") + create_rp4_resp = self.payloads_data.get("create_rp4_resp") + create_rp5_resp = self.payloads_data.get("create_rp5_resp") + create_rp6_resp = self.payloads_data.get("create_rp6_resp") + create_rp7_resp = self.payloads_data.get("create_rp7_resp") + deploy_rp1_rp3_resp = 
self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + att_rp1_status = self.payloads_data.get("attach_rp1_resp") + att_rp2_status = self.payloads_data.get("attach_rp2_resp") + att_rp3_status = self.payloads_data.get("attach_rp3_resp") + att_rp4_status = self.payloads_data.get("attach_rp4_resp") + att_rp5_status = self.payloads_data.get("attach_rp5_resp") + att_rp6_status = self.payloads_data.get("attach_rp6_resp") + att_rp7_status = self.payloads_data.get("attach_rp7_resp") + + create_rp7_resp_unauth_err = self.payloads_data.get( + "create_rp7_resp_unauth_err" + ) + + self.run_dcnm_send.side_effect = [ + have_rp1_resp, + have_rp2_resp, + have_rp3_resp, + have_rp4_resp, + have_rp5_resp, + have_rp6_resp, + have_rp7_resp, + create_rp1_resp, + create_rp2_resp, + create_rp3_resp, + create_rp4_resp, + create_rp5_resp, + create_rp6_resp, + create_rp7_resp_unauth_err, + have_rp7_resp, + create_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + att_rp1_status, + att_rp2_status, + att_rp3_status, + att_rp4_status, + att_rp5_status, + att_rp6_status, + att_rp7_status, + ] + + if "test_dcnm_srp_config_without_state" == self._testMethodName: + + have_rp1_resp = [] + have_rp2_resp = [] + have_rp3_resp = [] + have_rp4_resp = [] + have_rp5_resp = [] + have_rp6_resp = [] + have_rp7_resp = [] + create_rp1_resp = self.payloads_data.get("create_rp1_resp") + create_rp2_resp = self.payloads_data.get("create_rp2_resp") + create_rp3_resp = self.payloads_data.get("create_rp3_resp") + create_rp4_resp = self.payloads_data.get("create_rp4_resp") + create_rp5_resp = self.payloads_data.get("create_rp5_resp") + create_rp6_resp = self.payloads_data.get("create_rp6_resp") + create_rp7_resp = self.payloads_data.get("create_rp7_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + att_rp1_status = 
self.payloads_data.get("attach_rp1_resp") + att_rp2_status = self.payloads_data.get("attach_rp2_resp") + att_rp3_status = self.payloads_data.get("attach_rp3_resp") + att_rp4_status = self.payloads_data.get("attach_rp4_resp") + att_rp5_status = self.payloads_data.get("attach_rp5_resp") + att_rp6_status = self.payloads_data.get("attach_rp6_resp") + att_rp7_status = self.payloads_data.get("attach_rp7_resp") + + self.run_dcnm_send.side_effect = [ + have_rp1_resp, + have_rp2_resp, + have_rp3_resp, + have_rp4_resp, + have_rp5_resp, + have_rp6_resp, + have_rp7_resp, + create_rp1_resp, + create_rp2_resp, + create_rp3_resp, + create_rp4_resp, + create_rp5_resp, + create_rp6_resp, + create_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + att_rp1_status, + att_rp2_status, + att_rp3_status, + att_rp4_status, + att_rp5_status, + att_rp6_status, + att_rp7_status, + ] + + if "test_dcnm_srp_merge_no_deploy" == self._testMethodName: + + have_rp1_resp = [] + have_rp2_resp = [] + have_rp3_resp = [] + have_rp4_resp = [] + have_rp5_resp = [] + have_rp6_resp = [] + have_rp7_resp = [] + create_rp1_resp = self.payloads_data.get("create_rp1_resp") + create_rp2_resp = self.payloads_data.get("create_rp2_resp") + create_rp3_resp = self.payloads_data.get("create_rp3_resp") + create_rp4_resp = self.payloads_data.get("create_rp4_resp") + create_rp5_resp = self.payloads_data.get("create_rp5_resp") + create_rp6_resp = self.payloads_data.get("create_rp6_resp") + create_rp7_resp = self.payloads_data.get("create_rp7_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + att_rp1_status = self.payloads_data.get("attach_rp1_resp") + att_rp2_status = self.payloads_data.get("attach_rp2_resp") + att_rp3_status = self.payloads_data.get("attach_rp3_resp") + att_rp4_status = self.payloads_data.get("attach_rp4_resp") + att_rp5_status = self.payloads_data.get("attach_rp5_resp") + att_rp6_status = 
self.payloads_data.get("attach_rp6_resp") + att_rp7_status = self.payloads_data.get("attach_rp7_resp") + + self.run_dcnm_send.side_effect = [ + have_rp1_resp, + have_rp2_resp, + have_rp3_resp, + have_rp4_resp, + have_rp5_resp, + have_rp6_resp, + have_rp7_resp, + create_rp1_resp, + create_rp2_resp, + create_rp3_resp, + create_rp4_resp, + create_rp5_resp, + create_rp6_resp, + create_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + att_rp1_status, + att_rp2_status, + att_rp3_status, + att_rp4_status, + att_rp5_status, + att_rp6_status, + att_rp7_status, + ] + + if "test_dcnm_srp_merge_deploy_false" == self._testMethodName: + + have_rp1_resp = [] + have_rp2_resp = [] + have_rp3_resp = [] + have_rp4_resp = [] + have_rp5_resp = [] + have_rp6_resp = [] + have_rp7_resp = [] + create_rp1_resp = self.payloads_data.get("create_rp1_resp") + create_rp2_resp = self.payloads_data.get("create_rp2_resp") + create_rp3_resp = self.payloads_data.get("create_rp3_resp") + create_rp4_resp = self.payloads_data.get("create_rp4_resp") + create_rp5_resp = self.payloads_data.get("create_rp5_resp") + create_rp6_resp = self.payloads_data.get("create_rp6_resp") + create_rp7_resp = self.payloads_data.get("create_rp7_resp") + + self.run_dcnm_send.side_effect = [ + have_rp1_resp, + have_rp2_resp, + have_rp3_resp, + have_rp4_resp, + have_rp5_resp, + have_rp6_resp, + have_rp7_resp, + create_rp1_resp, + create_rp2_resp, + create_rp3_resp, + create_rp4_resp, + create_rp5_resp, + create_rp6_resp, + create_rp7_resp, + ] + + if "test_dcnm_srp_merged_existing" == self._testMethodName: + + have_rp1_resp = self.payloads_data.get("have_rp1_resp") + have_rp2_resp = self.payloads_data.get("have_rp2_resp") + have_rp3_resp = self.payloads_data.get("have_rp3_resp") + have_rp4_resp = self.payloads_data.get("have_rp4_resp") + have_rp5_resp = self.payloads_data.get("have_rp5_resp") + have_rp6_resp = self.payloads_data.get("have_rp6_resp") + have_rp7_resp = self.payloads_data.get("have_rp7_resp") + 
att_rp1_status = self.payloads_data.get("attach_rp1_resp") + att_rp2_status = self.payloads_data.get("attach_rp2_resp") + att_rp3_status = self.payloads_data.get("attach_rp3_resp") + att_rp4_status = self.payloads_data.get("attach_rp4_resp") + att_rp5_status = self.payloads_data.get("attach_rp5_resp") + att_rp6_status = self.payloads_data.get("attach_rp6_resp") + att_rp7_status = self.payloads_data.get("attach_rp7_resp") + + self.run_dcnm_send.side_effect = [ + have_rp1_resp, + have_rp2_resp, + have_rp3_resp, + have_rp4_resp, + have_rp5_resp, + have_rp6_resp, + have_rp7_resp, + att_rp1_status, + att_rp2_status, + att_rp3_status, + att_rp4_status, + att_rp5_status, + att_rp6_status, + att_rp7_status, + ] + + if "test_dcnm_srp_merged_existing_and_non_existing" == self._testMethodName: + + have_rp1_resp = self.payloads_data.get("have_rp1_resp") + have_rp3_resp = self.payloads_data.get("have_rp3_resp") + have_rp5_resp = self.payloads_data.get("have_rp5_resp") + att_rp1_status = self.payloads_data.get("attach_rp1_resp") + att_rp3_status = self.payloads_data.get("attach_rp3_resp") + att_rp5_status = self.payloads_data.get("attach_rp5_resp") + create_rp2_resp = self.payloads_data.get("create_rp2_resp") + create_rp4_resp = self.payloads_data.get("create_rp4_resp") + create_rp6_resp = self.payloads_data.get("create_rp6_resp") + create_rp7_resp = self.payloads_data.get("create_rp7_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + att_rp2_status = self.payloads_data.get("attach_rp2_resp") + att_rp4_status = self.payloads_data.get("attach_rp4_resp") + att_rp6_status = self.payloads_data.get("attach_rp6_resp") + att_rp7_status = self.payloads_data.get("attach_rp7_resp") + + self.run_dcnm_send.side_effect = [ + have_rp1_resp, + [], + have_rp3_resp, + [], + have_rp5_resp, + [], + [], + att_rp1_status, + att_rp3_status, + att_rp5_status, + create_rp2_resp, + create_rp4_resp, + 
create_rp6_resp, + create_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + att_rp2_status, + att_rp4_status, + att_rp6_status, + att_rp7_status, + ] + + if "test_dcnm_srp_merged_update_existing" == self._testMethodName: + + have_rp2_resp = self.payloads_data.get("have_rp2_resp") + have_rp4_resp = self.payloads_data.get("have_rp4_resp") + att_rp2_status = self.payloads_data.get("attach_rp2_resp") + att_rp4_status = self.payloads_data.get("attach_rp4_resp") + create_rp2_resp = self.payloads_data.get("create_rp2_resp") + create_rp4_resp = self.payloads_data.get("create_rp4_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + att_rp2_status = self.payloads_data.get("attach_rp2_resp") + att_rp4_status = self.payloads_data.get("attach_rp4_resp") + + self.run_dcnm_send.side_effect = [ + have_rp2_resp, + have_rp4_resp, + att_rp2_status, + att_rp4_status, + create_rp2_resp, + att_rp2_status, + create_rp4_resp, + att_rp4_status, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + att_rp2_status, + att_rp4_status, + ] + + if "test_dcnm_srp_merged_update_existing_unauth_err" == self._testMethodName: + + have_rp2_resp = self.payloads_data.get("have_rp2_resp") + have_rp4_resp = self.payloads_data.get("have_rp4_resp") + att_rp2_status = self.payloads_data.get("attach_rp2_resp") + att_rp4_status = self.payloads_data.get("attach_rp4_resp") + create_rp2_resp = self.payloads_data.get("create_rp2_resp") + create_rp4_resp = self.payloads_data.get("create_rp4_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + att_rp2_status = self.payloads_data.get("attach_rp2_resp") + att_rp4_status = self.payloads_data.get("attach_rp4_resp") + + create_rp4_resp_unauth_err = self.payloads_data.get( + "create_rp4_resp_unauth_err" + ) + + self.run_dcnm_send.side_effect = [ + have_rp2_resp, + have_rp4_resp, + 
att_rp2_status, + att_rp4_status, + create_rp2_resp, + att_rp2_status, + create_rp4_resp_unauth_err, + create_rp4_resp, + att_rp4_status, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + att_rp2_status, + att_rp4_status, + ] + + if "test_dcnm_srp_delete_existing" == self._testMethodName: + + have_rp1_resp = self.payloads_data.get("have_rp1_resp") + have_rp2_resp = self.payloads_data.get("have_rp2_resp") + have_rp3_resp = self.payloads_data.get("have_rp3_resp") + have_rp4_resp = self.payloads_data.get("have_rp4_resp") + have_rp5_resp = self.payloads_data.get("have_rp5_resp") + have_rp6_resp = self.payloads_data.get("have_rp6_resp") + have_rp7_resp = self.payloads_data.get("have_rp7_resp") + det_rp1_rp3_resp = self.payloads_data.get("detach_rp1_rp3_resp") + det_rp4_rp7_resp = self.payloads_data.get("detach_rp4_rp7_resp") + delete_rp1_resp = self.payloads_data.get("delete_rp1_resp") + delete_rp2_resp = self.payloads_data.get("delete_rp2_resp") + delete_rp3_resp = self.payloads_data.get("delete_rp3_resp") + delete_rp4_resp = self.payloads_data.get("delete_rp4_resp") + delete_rp5_resp = self.payloads_data.get("delete_rp5_resp") + delete_rp6_resp = self.payloads_data.get("delete_rp6_resp") + delete_rp7_resp = self.payloads_data.get("delete_rp7_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + dd_rp1_status = self.payloads_data.get("del_deploy_rp1_resp") + dd_rp2_status = self.payloads_data.get("del_deploy_rp2_resp") + dd_rp3_status = self.payloads_data.get("del_deploy_rp3_resp") + dd_rp4_status = self.payloads_data.get("del_deploy_rp4_resp") + dd_rp5_status = self.payloads_data.get("del_deploy_rp5_resp") + dd_rp6_status = self.payloads_data.get("del_deploy_rp6_resp") + dd_rp7_status = self.payloads_data.get("del_deploy_rp7_resp") + + self.run_dcnm_send.side_effect = [ + have_rp1_resp, + have_rp2_resp, + have_rp3_resp, + have_rp4_resp, + have_rp5_resp, + have_rp6_resp, + 
have_rp7_resp, + det_rp1_rp3_resp, + det_rp4_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + dd_rp1_status, + dd_rp2_status, + dd_rp3_status, + dd_rp4_status, + dd_rp5_status, + dd_rp6_status, + dd_rp7_status, + delete_rp1_resp, + delete_rp2_resp, + delete_rp3_resp, + delete_rp4_resp, + delete_rp5_resp, + delete_rp6_resp, + delete_rp7_resp, + ] + + if "test_dcnm_srp_delete_existing_no_config" == self._testMethodName: + + serv_nodes_resp = self.payloads_data.get("serv_nodes_resp") + have_it_sn1_resp = self.payloads_data.get("have_it_sn1_resp") + have_it_sn2_resp = self.payloads_data.get("have_it_sn2_resp") + det_rp1_rp3_resp = self.payloads_data.get("detach_rp1_rp3_resp") + det_rp4_rp7_resp = self.payloads_data.get("detach_rp4_rp7_resp") + delete_rp1_resp = self.payloads_data.get("delete_rp1_resp") + delete_rp2_resp = self.payloads_data.get("delete_rp2_resp") + delete_rp3_resp = self.payloads_data.get("delete_rp3_resp") + delete_rp4_resp = self.payloads_data.get("delete_rp4_resp") + delete_rp5_resp = self.payloads_data.get("delete_rp5_resp") + delete_rp6_resp = self.payloads_data.get("delete_rp6_resp") + delete_rp7_resp = self.payloads_data.get("delete_rp7_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + dd_rp1_status = self.payloads_data.get("del_deploy_rp1_resp") + dd_rp2_status = self.payloads_data.get("del_deploy_rp2_resp") + dd_rp3_status = self.payloads_data.get("del_deploy_rp3_resp") + dd_rp4_status = self.payloads_data.get("del_deploy_rp4_resp") + dd_rp5_status = self.payloads_data.get("del_deploy_rp5_resp") + dd_rp6_status = self.payloads_data.get("del_deploy_rp6_resp") + dd_rp7_status = self.payloads_data.get("del_deploy_rp7_resp") + + self.run_dcnm_send.side_effect = [ + serv_nodes_resp, + have_it_sn1_resp, + have_it_sn2_resp, + det_rp1_rp3_resp, + det_rp4_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + dd_rp1_status, + dd_rp2_status, + 
dd_rp3_status, + dd_rp4_status, + dd_rp5_status, + dd_rp6_status, + dd_rp7_status, + delete_rp1_resp, + delete_rp2_resp, + delete_rp3_resp, + delete_rp4_resp, + delete_rp5_resp, + delete_rp6_resp, + delete_rp7_resp, + ] + + if "test_dcnm_srp_delete_existing_with_node_name" == self._testMethodName: + + have_it_sn1_resp = self.payloads_data.get("have_it_sn1_resp") + have_it_sn2_resp = self.payloads_data.get("have_it_sn2_resp") + det_rp1_rp3_resp = self.payloads_data.get("detach_rp1_rp3_resp") + det_rp4_rp7_resp = self.payloads_data.get("detach_rp4_rp7_resp") + delete_rp1_resp = self.payloads_data.get("delete_rp1_resp") + delete_rp2_resp = self.payloads_data.get("delete_rp2_resp") + delete_rp3_resp = self.payloads_data.get("delete_rp3_resp") + delete_rp4_resp = self.payloads_data.get("delete_rp4_resp") + delete_rp5_resp = self.payloads_data.get("delete_rp5_resp") + delete_rp6_resp = self.payloads_data.get("delete_rp6_resp") + delete_rp7_resp = self.payloads_data.get("delete_rp7_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + dd_rp1_status = self.payloads_data.get("del_deploy_rp1_resp") + dd_rp2_status = self.payloads_data.get("del_deploy_rp2_resp") + dd_rp3_status = self.payloads_data.get("del_deploy_rp3_resp") + dd_rp4_status = self.payloads_data.get("del_deploy_rp4_resp") + dd_rp5_status = self.payloads_data.get("del_deploy_rp5_resp") + dd_rp6_status = self.payloads_data.get("del_deploy_rp6_resp") + dd_rp7_status = self.payloads_data.get("del_deploy_rp7_resp") + + self.run_dcnm_send.side_effect = [ + have_it_sn1_resp, + have_it_sn2_resp, + det_rp1_rp3_resp, + det_rp4_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + dd_rp1_status, + dd_rp2_status, + dd_rp3_status, + dd_rp4_status, + dd_rp5_status, + dd_rp6_status, + dd_rp7_status, + delete_rp1_resp, + delete_rp2_resp, + delete_rp3_resp, + delete_rp4_resp, + delete_rp5_resp, + delete_rp6_resp, + 
delete_rp7_resp, + ] + + if "test_dcnm_srp_delete_existing_unauth_err" == self._testMethodName: + + have_rp1_resp = self.payloads_data.get("have_rp1_resp") + have_rp2_resp = self.payloads_data.get("have_rp2_resp") + have_rp3_resp = self.payloads_data.get("have_rp3_resp") + have_rp4_resp = self.payloads_data.get("have_rp4_resp") + have_rp5_resp = self.payloads_data.get("have_rp5_resp") + have_rp6_resp = self.payloads_data.get("have_rp6_resp") + have_rp7_resp = self.payloads_data.get("have_rp7_resp") + det_rp1_rp3_resp = self.payloads_data.get("detach_rp1_rp3_resp") + det_rp4_rp7_resp = self.payloads_data.get("detach_rp4_rp7_resp") + delete_rp1_resp = self.payloads_data.get("delete_rp1_resp") + delete_rp2_resp = self.payloads_data.get("delete_rp2_resp") + delete_rp3_resp = self.payloads_data.get("delete_rp3_resp") + delete_rp4_resp = self.payloads_data.get("delete_rp4_resp") + delete_rp5_resp = self.payloads_data.get("delete_rp5_resp") + delete_rp6_resp = self.payloads_data.get("delete_rp6_resp") + delete_rp7_resp = self.payloads_data.get("delete_rp7_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + dd_rp1_status = self.payloads_data.get("del_deploy_rp1_resp") + dd_rp2_status = self.payloads_data.get("del_deploy_rp2_resp") + dd_rp3_status = self.payloads_data.get("del_deploy_rp3_resp") + dd_rp4_status = self.payloads_data.get("del_deploy_rp4_resp") + dd_rp5_status = self.payloads_data.get("del_deploy_rp5_resp") + dd_rp6_status = self.payloads_data.get("del_deploy_rp6_resp") + dd_rp7_status = self.payloads_data.get("del_deploy_rp7_resp") + + det_rp1_resp_unauth_err = self.payloads_data.get("det_rp1_resp_unauth_err") + deploy_rp4_resp_unauth_err = self.payloads_data.get( + "deploy_rp4_resp_unauth_err" + ) + delete_rp7_resp_unauth_err = self.payloads_data.get( + "delete_rp7_resp_unauth_err" + ) self.run_dcnm_send.side_effect = [ - serv_nodes_resp, - have_it_sn1_resp, 
have_it_sn2_resp, - det_rp1_rp3_resp, det_rp4_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - dd_rp1_status, dd_rp2_status, dd_rp3_status, - dd_rp4_status, dd_rp5_status, dd_rp6_status, - dd_rp7_status, - delete_rp1_resp, delete_rp2_resp, delete_rp3_resp, - delete_rp4_resp, delete_rp5_resp, delete_rp6_resp, - delete_rp7_resp] - - if ('test_dcnm_srp_delete_existing_with_node_name' == self._testMethodName): - - - have_it_sn1_resp = self.payloads_data.get('have_it_sn1_resp') - have_it_sn2_resp = self.payloads_data.get('have_it_sn2_resp') - det_rp1_rp3_resp = self.payloads_data.get('detach_rp1_rp3_resp') - det_rp4_rp7_resp = self.payloads_data.get('detach_rp4_rp7_resp') - delete_rp1_resp = self.payloads_data.get('delete_rp1_resp') - delete_rp2_resp = self.payloads_data.get('delete_rp2_resp') - delete_rp3_resp = self.payloads_data.get('delete_rp3_resp') - delete_rp4_resp = self.payloads_data.get('delete_rp4_resp') - delete_rp5_resp = self.payloads_data.get('delete_rp5_resp') - delete_rp6_resp = self.payloads_data.get('delete_rp6_resp') - delete_rp7_resp = self.payloads_data.get('delete_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - dd_rp1_status = self.payloads_data.get('del_deploy_rp1_resp') - dd_rp2_status = self.payloads_data.get('del_deploy_rp2_resp') - dd_rp3_status = self.payloads_data.get('del_deploy_rp3_resp') - dd_rp4_status = self.payloads_data.get('del_deploy_rp4_resp') - dd_rp5_status = self.payloads_data.get('del_deploy_rp5_resp') - dd_rp6_status = self.payloads_data.get('del_deploy_rp6_resp') - dd_rp7_status = self.payloads_data.get('del_deploy_rp7_resp') + have_rp1_resp, + have_rp2_resp, + have_rp3_resp, + have_rp4_resp, + have_rp5_resp, + have_rp6_resp, + have_rp7_resp, + det_rp1_resp_unauth_err, + det_rp1_rp3_resp, + det_rp4_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + dd_rp1_status, + dd_rp2_status, + dd_rp3_status, + 
dd_rp4_status, + dd_rp5_status, + dd_rp6_status, + dd_rp7_status, + delete_rp1_resp, + delete_rp2_resp, + delete_rp3_resp, + delete_rp4_resp, + delete_rp5_resp, + delete_rp6_resp, + delete_rp7_resp_unauth_err, + deploy_rp4_rp7_resp, + delete_rp7_resp, + ] + + if "test_dcnm_srp_delete_existing_and_non_existing" == self._testMethodName: + + have_rp1_resp = self.payloads_data.get("have_rp1_resp") + have_rp3_resp = self.payloads_data.get("have_rp3_resp") + have_rp6_resp = self.payloads_data.get("have_rp6_resp") + have_rp7_resp = self.payloads_data.get("have_rp7_resp") + det_rp1_rp3_resp = self.payloads_data.get("detach_rp1_rp3_resp") + det_rp4_rp7_resp = self.payloads_data.get("detach_rp4_rp7_resp") + delete_rp1_resp = self.payloads_data.get("delete_rp1_resp") + delete_rp3_resp = self.payloads_data.get("delete_rp3_resp") + delete_rp6_resp = self.payloads_data.get("delete_rp6_resp") + delete_rp7_resp = self.payloads_data.get("delete_rp7_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + dd_rp1_status = self.payloads_data.get("del_deploy_rp1_resp") + dd_rp3_status = self.payloads_data.get("del_deploy_rp3_resp") + dd_rp6_status = self.payloads_data.get("del_deploy_rp6_resp") + dd_rp7_status = self.payloads_data.get("del_deploy_rp7_resp") self.run_dcnm_send.side_effect = [ - have_it_sn1_resp, have_it_sn2_resp, - det_rp1_rp3_resp, det_rp4_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - dd_rp1_status, dd_rp2_status, dd_rp3_status, - dd_rp4_status, dd_rp5_status, dd_rp6_status, - dd_rp7_status, - delete_rp1_resp, delete_rp2_resp, delete_rp3_resp, - delete_rp4_resp, delete_rp5_resp, delete_rp6_resp, - delete_rp7_resp] - - - if ('test_dcnm_srp_delete_existing_unauth_err' == self._testMethodName): - - - have_rp1_resp = self.payloads_data.get('have_rp1_resp') - have_rp2_resp = self.payloads_data.get('have_rp2_resp') - have_rp3_resp = self.payloads_data.get('have_rp3_resp') - 
have_rp4_resp = self.payloads_data.get('have_rp4_resp') - have_rp5_resp = self.payloads_data.get('have_rp5_resp') - have_rp6_resp = self.payloads_data.get('have_rp6_resp') - have_rp7_resp = self.payloads_data.get('have_rp7_resp') - det_rp1_rp3_resp = self.payloads_data.get('detach_rp1_rp3_resp') - det_rp4_rp7_resp = self.payloads_data.get('detach_rp4_rp7_resp') - delete_rp1_resp = self.payloads_data.get('delete_rp1_resp') - delete_rp2_resp = self.payloads_data.get('delete_rp2_resp') - delete_rp3_resp = self.payloads_data.get('delete_rp3_resp') - delete_rp4_resp = self.payloads_data.get('delete_rp4_resp') - delete_rp5_resp = self.payloads_data.get('delete_rp5_resp') - delete_rp6_resp = self.payloads_data.get('delete_rp6_resp') - delete_rp7_resp = self.payloads_data.get('delete_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - dd_rp1_status = self.payloads_data.get('del_deploy_rp1_resp') - dd_rp2_status = self.payloads_data.get('del_deploy_rp2_resp') - dd_rp3_status = self.payloads_data.get('del_deploy_rp3_resp') - dd_rp4_status = self.payloads_data.get('del_deploy_rp4_resp') - dd_rp5_status = self.payloads_data.get('del_deploy_rp5_resp') - dd_rp6_status = self.payloads_data.get('del_deploy_rp6_resp') - dd_rp7_status = self.payloads_data.get('del_deploy_rp7_resp') - - det_rp1_resp_unauth_err = self.payloads_data.get('det_rp1_resp_unauth_err') - deploy_rp4_resp_unauth_err = self.payloads_data.get('deploy_rp4_resp_unauth_err') - delete_rp7_resp_unauth_err = self.payloads_data.get('delete_rp7_resp_unauth_err') - - self.run_dcnm_send.side_effect = [have_rp1_resp, have_rp2_resp, have_rp3_resp, - have_rp4_resp, have_rp5_resp, have_rp6_resp, - have_rp7_resp, - det_rp1_resp_unauth_err, - det_rp1_rp3_resp, det_rp4_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - dd_rp1_status, dd_rp2_status, dd_rp3_status, - dd_rp4_status, dd_rp5_status, dd_rp6_status, - 
dd_rp7_status, - delete_rp1_resp, delete_rp2_resp, delete_rp3_resp, - delete_rp4_resp, delete_rp5_resp, delete_rp6_resp, - delete_rp7_resp_unauth_err, deploy_rp4_rp7_resp, delete_rp7_resp] - - if ('test_dcnm_srp_delete_existing_and_non_existing' == self._testMethodName): - - have_rp1_resp = self.payloads_data.get('have_rp1_resp') - have_rp3_resp = self.payloads_data.get('have_rp3_resp') - have_rp6_resp = self.payloads_data.get('have_rp6_resp') - have_rp7_resp = self.payloads_data.get('have_rp7_resp') - det_rp1_rp3_resp = self.payloads_data.get('detach_rp1_rp3_resp') - det_rp4_rp7_resp = self.payloads_data.get('detach_rp4_rp7_resp') - delete_rp1_resp = self.payloads_data.get('delete_rp1_resp') - delete_rp3_resp = self.payloads_data.get('delete_rp3_resp') - delete_rp6_resp = self.payloads_data.get('delete_rp6_resp') - delete_rp7_resp = self.payloads_data.get('delete_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - dd_rp1_status = self.payloads_data.get('del_deploy_rp1_resp') - dd_rp3_status = self.payloads_data.get('del_deploy_rp3_resp') - dd_rp6_status = self.payloads_data.get('del_deploy_rp6_resp') - dd_rp7_status = self.payloads_data.get('del_deploy_rp7_resp') - - self.run_dcnm_send.side_effect = [have_rp1_resp, [], have_rp3_resp, - [], [], have_rp6_resp, - have_rp7_resp, - det_rp1_rp3_resp, det_rp4_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - dd_rp1_status, dd_rp3_status, - dd_rp6_status, dd_rp7_status, - delete_rp1_resp, delete_rp3_resp, - delete_rp6_resp, delete_rp7_resp] - - if ('test_dcnm_srp_delete_non_existing' == self._testMethodName): + have_rp1_resp, + [], + have_rp3_resp, + [], + [], + have_rp6_resp, + have_rp7_resp, + det_rp1_rp3_resp, + det_rp4_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + dd_rp1_status, + dd_rp3_status, + dd_rp6_status, + dd_rp7_status, + delete_rp1_resp, + delete_rp3_resp, + delete_rp6_resp, + 
delete_rp7_resp, + ] + + if "test_dcnm_srp_delete_non_existing" == self._testMethodName: self.run_dcnm_send.side_effect = [[], [], [], [], [], [], []] - if ('test_dcnm_srp_replace_rp1_to_rp3_non_existing' == self._testMethodName): - - create_rp1_resp = self.payloads_data.get('create_rp1_resp') - create_rp2_resp = self.payloads_data.get('create_rp2_resp') - create_rp3_resp = self.payloads_data.get('create_rp3_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - att_rp1_status = self.payloads_data.get('attach_rp1_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp3_status = self.payloads_data.get('attach_rp3_resp') - - self.run_dcnm_send.side_effect = [[], [], [], - create_rp1_resp, create_rp2_resp, create_rp3_resp, - deploy_rp1_rp3_resp, - att_rp1_status, att_rp2_status, att_rp3_status - ] - - if ('test_dcnm_srp_replace_rp1_to_rp3_existing' == self._testMethodName): - - have_rp1_resp = self.payloads_data.get('have_rp1_resp') - have_rp2_resp = self.payloads_data.get('have_rp2_resp') - have_rp3_resp = self.payloads_data.get('have_rp3_resp') - att_rp1_status = self.payloads_data.get('attach_rp1_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp3_status = self.payloads_data.get('attach_rp3_resp') - create_rp1_resp = self.payloads_data.get('create_rp1_resp') - create_rp2_resp = self.payloads_data.get('create_rp2_resp') - create_rp3_resp = self.payloads_data.get('create_rp3_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - att_rp1_status = self.payloads_data.get('attach_rp1_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp3_status = self.payloads_data.get('attach_rp3_resp') - - self.run_dcnm_send.side_effect = [have_rp1_resp, have_rp2_resp, have_rp3_resp, - att_rp1_status, att_rp2_status, att_rp3_status, - create_rp1_resp, att_rp1_status, - create_rp2_resp, att_rp2_status, - create_rp3_resp, att_rp3_status, - deploy_rp1_rp3_resp, - 
att_rp1_status, att_rp2_status, att_rp3_status - ] - - if ('test_dcnm_srp_replace_rp1_to_rp3_existing_no_change' == self._testMethodName): - - have_rp1_resp = self.payloads_data.get('have_rp1_resp') - have_rp2_resp = self.payloads_data.get('have_rp2_resp') - have_rp3_resp = self.payloads_data.get('have_rp3_resp') - att_rp1_status = self.payloads_data.get('attach_rp1_resp') - att_rp2_status = self.payloads_data.get('attach_rp2_resp') - att_rp3_status = self.payloads_data.get('attach_rp3_resp') - - self.run_dcnm_send.side_effect = [have_rp1_resp, have_rp2_resp, have_rp3_resp, - att_rp1_status, att_rp2_status, att_rp3_status] - - if ('test_dcnm_srp_override_rp1_rp7_with_new_peerings' == self._testMethodName): - - serv_nodes_resp = self.payloads_data.get('serv_nodes_resp') - have_it_sn1_resp = self.payloads_data.get('have_it_sn1_resp') - have_it_sn2_resp = self.payloads_data.get('have_it_sn2_resp') - det_rp1_rp3_resp = self.payloads_data.get('detach_rp1_rp3_resp') - det_rp4_rp7_resp = self.payloads_data.get('detach_rp4_rp7_resp') - create_rp1_resp = self.payloads_data.get('create_rp1_resp') - create_rp4_resp = self.payloads_data.get('create_rp4_resp') - delete_rp1_resp = self.payloads_data.get('delete_rp1_resp') - delete_rp2_resp = self.payloads_data.get('delete_rp2_resp') - delete_rp3_resp = self.payloads_data.get('delete_rp3_resp') - delete_rp4_resp = self.payloads_data.get('delete_rp4_resp') - delete_rp5_resp = self.payloads_data.get('delete_rp5_resp') - delete_rp6_resp = self.payloads_data.get('delete_rp6_resp') - delete_rp7_resp = self.payloads_data.get('delete_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - dd_rp1_status = self.payloads_data.get('del_deploy_rp1_resp') - dd_rp2_status = self.payloads_data.get('del_deploy_rp2_resp') - dd_rp3_status = self.payloads_data.get('del_deploy_rp3_resp') - dd_rp4_status = self.payloads_data.get('del_deploy_rp4_resp') - 
dd_rp5_status = self.payloads_data.get('del_deploy_rp5_resp') - dd_rp6_status = self.payloads_data.get('del_deploy_rp6_resp') - dd_rp7_status = self.payloads_data.get('del_deploy_rp7_resp') - deploy_rp_ovr1_resp = self.payloads_data.get('deploy_rp_ovr1_resp') - deploy_rp_ovr4_resp = self.payloads_data.get('deploy_rp_ovr4_resp') - att_rp1_status = self.payloads_data.get('attach_rp1_resp') - att_rp4_status = self.payloads_data.get('attach_rp4_resp') - - self.run_dcnm_send.side_effect = [[], [], - serv_nodes_resp, - have_it_sn1_resp, have_it_sn2_resp, - create_rp1_resp, create_rp4_resp, - det_rp1_rp3_resp, det_rp4_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - dd_rp1_status, dd_rp2_status, dd_rp3_status, - dd_rp4_status, dd_rp5_status, dd_rp6_status, - dd_rp7_status, - delete_rp1_resp, delete_rp2_resp, delete_rp3_resp, - delete_rp4_resp, delete_rp5_resp, delete_rp6_resp, - delete_rp7_resp, deploy_rp_ovr1_resp, deploy_rp_ovr4_resp, - att_rp1_status, att_rp4_status - ] - - if ('test_dcnm_srp_override_with_existing_peering' == self._testMethodName): - - serv_nodes_resp = self.payloads_data.get('serv_nodes_resp') - have_rp6_resp = self.payloads_data.get('have_rp6_resp') - have_it_sn1_resp = self.payloads_data.get('have_it_sn1_resp') - have_it_sn2_resp = self.payloads_data.get('have_it_sn2_resp') - det_rp1_rp3_resp = self.payloads_data.get('detach_rp1_rp3_resp') - det_rp4_rp7_resp = self.payloads_data.get('detach_rp4_rp7_resp') - att_rp6_status = self.payloads_data.get('attach_rp6_resp') - delete_rp1_resp = self.payloads_data.get('delete_rp1_resp') - delete_rp2_resp = self.payloads_data.get('delete_rp2_resp') - delete_rp3_resp = self.payloads_data.get('delete_rp3_resp') - delete_rp4_resp = self.payloads_data.get('delete_rp4_resp') - delete_rp5_resp = self.payloads_data.get('delete_rp5_resp') - delete_rp7_resp = self.payloads_data.get('delete_rp7_resp') - dd_rp1_status = self.payloads_data.get('del_deploy_rp1_resp') - dd_rp2_status = 
self.payloads_data.get('del_deploy_rp2_resp') - dd_rp3_status = self.payloads_data.get('del_deploy_rp3_resp') - dd_rp4_status = self.payloads_data.get('del_deploy_rp4_resp') - dd_rp5_status = self.payloads_data.get('del_deploy_rp5_resp') - dd_rp7_status = self.payloads_data.get('del_deploy_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - - self.run_dcnm_send.side_effect = [have_rp6_resp, - serv_nodes_resp, - have_it_sn1_resp, - have_it_sn2_resp, - att_rp6_status, - det_rp1_rp3_resp, det_rp4_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - dd_rp1_status, dd_rp2_status, dd_rp3_status, - dd_rp4_status, dd_rp5_status, dd_rp7_status, - delete_rp1_resp, delete_rp2_resp, delete_rp3_resp, - delete_rp4_resp, delete_rp5_resp, delete_rp7_resp - ] - - if ('test_dcnm_srp_override_with_existing_peering_updated' == self._testMethodName): - - serv_nodes_resp = self.payloads_data.get('serv_nodes_resp') - have_rp6_resp = self.payloads_data.get('have_rp6_resp') - have_it_sn1_resp = self.payloads_data.get('have_it_sn1_resp') - have_it_sn2_resp = self.payloads_data.get('have_it_sn2_resp') - create_rp6_resp = self.payloads_data.get('create_rp6_resp') - att_rp6_status = self.payloads_data.get('attach_rp6_resp') - det_rp1_rp3_resp = self.payloads_data.get('detach_rp1_rp3_resp') - det_rp4_rp7_resp = self.payloads_data.get('detach_rp4_rp7_resp') - delete_rp1_resp = self.payloads_data.get('delete_rp1_resp') - delete_rp2_resp = self.payloads_data.get('delete_rp2_resp') - delete_rp3_resp = self.payloads_data.get('delete_rp3_resp') - delete_rp4_resp = self.payloads_data.get('delete_rp4_resp') - delete_rp5_resp = self.payloads_data.get('delete_rp5_resp') - delete_rp7_resp = self.payloads_data.get('delete_rp7_resp') - dd_rp1_status = self.payloads_data.get('del_deploy_rp1_resp') - dd_rp2_status = self.payloads_data.get('del_deploy_rp2_resp') - dd_rp3_status = 
self.payloads_data.get('del_deploy_rp3_resp') - dd_rp4_status = self.payloads_data.get('del_deploy_rp4_resp') - dd_rp5_status = self.payloads_data.get('del_deploy_rp5_resp') - dd_rp7_status = self.payloads_data.get('del_deploy_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - att_rp6_status = self.payloads_data.get('attach_rp6_resp') - - self.run_dcnm_send.side_effect = [have_rp6_resp, - serv_nodes_resp, - have_it_sn1_resp, - have_it_sn2_resp, - att_rp6_status, - create_rp6_resp, - att_rp6_status, - det_rp1_rp3_resp, det_rp4_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - dd_rp1_status, dd_rp2_status, dd_rp3_status, - dd_rp4_status, dd_rp5_status, dd_rp7_status, - delete_rp1_resp, delete_rp2_resp, delete_rp3_resp, - delete_rp4_resp, delete_rp5_resp, delete_rp7_resp, - deploy_rp4_rp7_resp, - att_rp6_status - ] - - if ('test_dcnm_srp_override_with_no_config' == self._testMethodName): - - serv_nodes_resp = self.payloads_data.get('serv_nodes_resp') - have_it_sn1_resp = self.payloads_data.get('have_it_sn1_resp') - have_it_sn2_resp = self.payloads_data.get('have_it_sn2_resp') - det_rp1_rp3_resp = self.payloads_data.get('detach_rp1_rp3_resp') - det_rp4_rp7_resp = self.payloads_data.get('detach_rp4_rp7_resp') - delete_rp1_resp = self.payloads_data.get('delete_rp1_resp') - delete_rp2_resp = self.payloads_data.get('delete_rp2_resp') - delete_rp3_resp = self.payloads_data.get('delete_rp3_resp') - delete_rp4_resp = self.payloads_data.get('delete_rp4_resp') - delete_rp5_resp = self.payloads_data.get('delete_rp5_resp') - delete_rp6_resp = self.payloads_data.get('delete_rp6_resp') - delete_rp7_resp = self.payloads_data.get('delete_rp7_resp') - dd_rp1_status = self.payloads_data.get('del_deploy_rp1_resp') - dd_rp2_status = self.payloads_data.get('del_deploy_rp2_resp') - dd_rp3_status = self.payloads_data.get('del_deploy_rp3_resp') - dd_rp4_status = 
self.payloads_data.get('del_deploy_rp4_resp') - dd_rp5_status = self.payloads_data.get('del_deploy_rp5_resp') - dd_rp6_status = self.payloads_data.get('del_deploy_rp6_resp') - dd_rp7_status = self.payloads_data.get('del_deploy_rp7_resp') - deploy_rp1_rp3_resp = self.payloads_data.get('deploy_rp1_rp3_resp') - deploy_rp4_rp7_resp = self.payloads_data.get('deploy_rp4_rp7_resp') - - self.run_dcnm_send.side_effect = [serv_nodes_resp, - have_it_sn1_resp, have_it_sn2_resp, - det_rp1_rp3_resp, det_rp4_rp7_resp, - deploy_rp1_rp3_resp, deploy_rp4_rp7_resp, - dd_rp1_status, dd_rp2_status, dd_rp3_status, - dd_rp4_status, dd_rp5_status, dd_rp6_status, - dd_rp7_status, - delete_rp1_resp, delete_rp2_resp, delete_rp3_resp, - delete_rp4_resp, delete_rp5_resp, delete_rp6_resp, - delete_rp7_resp - ] - - if ('test_dcnm_srp_query_non_existing' == self._testMethodName): - - self.run_dcnm_send.side_effect = [[],[]] - - if ('test_dcnm_srp_query_with_service_nodes' == self._testMethodName): - - have_it_sn1_resp = self.payloads_data.get('have_it_sn1_resp') - have_it_sn2_resp = self.payloads_data.get('have_it_sn2_resp') + if "test_dcnm_srp_replace_rp1_to_rp3_non_existing" == self._testMethodName: - self.run_dcnm_send.side_effect = [have_it_sn1_resp, have_it_sn2_resp] + create_rp1_resp = self.payloads_data.get("create_rp1_resp") + create_rp2_resp = self.payloads_data.get("create_rp2_resp") + create_rp3_resp = self.payloads_data.get("create_rp3_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + att_rp1_status = self.payloads_data.get("attach_rp1_resp") + att_rp2_status = self.payloads_data.get("attach_rp2_resp") + att_rp3_status = self.payloads_data.get("attach_rp3_resp") - if ('test_dcnm_srp_query_with_peer_names' == self._testMethodName): + self.run_dcnm_send.side_effect = [ + [], + [], + [], + create_rp1_resp, + create_rp2_resp, + create_rp3_resp, + deploy_rp1_rp3_resp, + att_rp1_status, + att_rp2_status, + att_rp3_status, + ] + + if 
"test_dcnm_srp_replace_rp1_to_rp3_existing" == self._testMethodName: + + have_rp1_resp = self.payloads_data.get("have_rp1_resp") + have_rp2_resp = self.payloads_data.get("have_rp2_resp") + have_rp3_resp = self.payloads_data.get("have_rp3_resp") + att_rp1_status = self.payloads_data.get("attach_rp1_resp") + att_rp2_status = self.payloads_data.get("attach_rp2_resp") + att_rp3_status = self.payloads_data.get("attach_rp3_resp") + create_rp1_resp = self.payloads_data.get("create_rp1_resp") + create_rp2_resp = self.payloads_data.get("create_rp2_resp") + create_rp3_resp = self.payloads_data.get("create_rp3_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + att_rp1_status = self.payloads_data.get("attach_rp1_resp") + att_rp2_status = self.payloads_data.get("attach_rp2_resp") + att_rp3_status = self.payloads_data.get("attach_rp3_resp") - have_rp1_resp = self.payloads_data.get('have_rp1_resp') - have_rp2_resp = self.payloads_data.get('have_rp2_resp') - have_rp3_resp = self.payloads_data.get('have_rp3_resp') - have_rp4_resp = self.payloads_data.get('have_rp4_resp') - have_rp5_resp = self.payloads_data.get('have_rp5_resp') - have_rp6_resp = self.payloads_data.get('have_rp6_resp') - have_rp7_resp = self.payloads_data.get('have_rp7_resp') + self.run_dcnm_send.side_effect = [ + have_rp1_resp, + have_rp2_resp, + have_rp3_resp, + att_rp1_status, + att_rp2_status, + att_rp3_status, + create_rp1_resp, + att_rp1_status, + create_rp2_resp, + att_rp2_status, + create_rp3_resp, + att_rp3_status, + deploy_rp1_rp3_resp, + att_rp1_status, + att_rp2_status, + att_rp3_status, + ] + + if ( + "test_dcnm_srp_replace_rp1_to_rp3_existing_no_change" + == self._testMethodName + ): + + have_rp1_resp = self.payloads_data.get("have_rp1_resp") + have_rp2_resp = self.payloads_data.get("have_rp2_resp") + have_rp3_resp = self.payloads_data.get("have_rp3_resp") + att_rp1_status = self.payloads_data.get("attach_rp1_resp") + att_rp2_status = 
self.payloads_data.get("attach_rp2_resp") + att_rp3_status = self.payloads_data.get("attach_rp3_resp") - self.run_dcnm_send.side_effect = [have_rp1_resp, have_rp2_resp, have_rp3_resp, - have_rp4_resp, have_rp5_resp, have_rp6_resp, - have_rp7_resp] + self.run_dcnm_send.side_effect = [ + have_rp1_resp, + have_rp2_resp, + have_rp3_resp, + att_rp1_status, + att_rp2_status, + att_rp3_status, + ] + + if "test_dcnm_srp_override_rp1_rp7_with_new_peerings" == self._testMethodName: + + serv_nodes_resp = self.payloads_data.get("serv_nodes_resp") + have_it_sn1_resp = self.payloads_data.get("have_it_sn1_resp") + have_it_sn2_resp = self.payloads_data.get("have_it_sn2_resp") + det_rp1_rp3_resp = self.payloads_data.get("detach_rp1_rp3_resp") + det_rp4_rp7_resp = self.payloads_data.get("detach_rp4_rp7_resp") + create_rp1_resp = self.payloads_data.get("create_rp1_resp") + create_rp4_resp = self.payloads_data.get("create_rp4_resp") + delete_rp1_resp = self.payloads_data.get("delete_rp1_resp") + delete_rp2_resp = self.payloads_data.get("delete_rp2_resp") + delete_rp3_resp = self.payloads_data.get("delete_rp3_resp") + delete_rp4_resp = self.payloads_data.get("delete_rp4_resp") + delete_rp5_resp = self.payloads_data.get("delete_rp5_resp") + delete_rp6_resp = self.payloads_data.get("delete_rp6_resp") + delete_rp7_resp = self.payloads_data.get("delete_rp7_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + dd_rp1_status = self.payloads_data.get("del_deploy_rp1_resp") + dd_rp2_status = self.payloads_data.get("del_deploy_rp2_resp") + dd_rp3_status = self.payloads_data.get("del_deploy_rp3_resp") + dd_rp4_status = self.payloads_data.get("del_deploy_rp4_resp") + dd_rp5_status = self.payloads_data.get("del_deploy_rp5_resp") + dd_rp6_status = self.payloads_data.get("del_deploy_rp6_resp") + dd_rp7_status = self.payloads_data.get("del_deploy_rp7_resp") + deploy_rp_ovr1_resp = 
self.payloads_data.get("deploy_rp_ovr1_resp") + deploy_rp_ovr4_resp = self.payloads_data.get("deploy_rp_ovr4_resp") + att_rp1_status = self.payloads_data.get("attach_rp1_resp") + att_rp4_status = self.payloads_data.get("attach_rp4_resp") + self.run_dcnm_send.side_effect = [ + [], + [], + serv_nodes_resp, + have_it_sn1_resp, + have_it_sn2_resp, + create_rp1_resp, + create_rp4_resp, + det_rp1_rp3_resp, + det_rp4_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + dd_rp1_status, + dd_rp2_status, + dd_rp3_status, + dd_rp4_status, + dd_rp5_status, + dd_rp6_status, + dd_rp7_status, + delete_rp1_resp, + delete_rp2_resp, + delete_rp3_resp, + delete_rp4_resp, + delete_rp5_resp, + delete_rp6_resp, + delete_rp7_resp, + deploy_rp_ovr1_resp, + deploy_rp_ovr4_resp, + att_rp1_status, + att_rp4_status, + ] + + if "test_dcnm_srp_override_with_existing_peering" == self._testMethodName: + + serv_nodes_resp = self.payloads_data.get("serv_nodes_resp") + have_rp6_resp = self.payloads_data.get("have_rp6_resp") + have_it_sn1_resp = self.payloads_data.get("have_it_sn1_resp") + have_it_sn2_resp = self.payloads_data.get("have_it_sn2_resp") + det_rp1_rp3_resp = self.payloads_data.get("detach_rp1_rp3_resp") + det_rp4_rp7_resp = self.payloads_data.get("detach_rp4_rp7_resp") + att_rp6_status = self.payloads_data.get("attach_rp6_resp") + delete_rp1_resp = self.payloads_data.get("delete_rp1_resp") + delete_rp2_resp = self.payloads_data.get("delete_rp2_resp") + delete_rp3_resp = self.payloads_data.get("delete_rp3_resp") + delete_rp4_resp = self.payloads_data.get("delete_rp4_resp") + delete_rp5_resp = self.payloads_data.get("delete_rp5_resp") + delete_rp7_resp = self.payloads_data.get("delete_rp7_resp") + dd_rp1_status = self.payloads_data.get("del_deploy_rp1_resp") + dd_rp2_status = self.payloads_data.get("del_deploy_rp2_resp") + dd_rp3_status = self.payloads_data.get("del_deploy_rp3_resp") + dd_rp4_status = self.payloads_data.get("del_deploy_rp4_resp") + dd_rp5_status = 
self.payloads_data.get("del_deploy_rp5_resp") + dd_rp7_status = self.payloads_data.get("del_deploy_rp7_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") - def load_fixtures(self, response=None, device=''): + self.run_dcnm_send.side_effect = [ + have_rp6_resp, + serv_nodes_resp, + have_it_sn1_resp, + have_it_sn2_resp, + att_rp6_status, + det_rp1_rp3_resp, + det_rp4_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + dd_rp1_status, + dd_rp2_status, + dd_rp3_status, + dd_rp4_status, + dd_rp5_status, + dd_rp7_status, + delete_rp1_resp, + delete_rp2_resp, + delete_rp3_resp, + delete_rp4_resp, + delete_rp5_resp, + delete_rp7_resp, + ] + + if ( + "test_dcnm_srp_override_with_existing_peering_updated" + == self._testMethodName + ): + + serv_nodes_resp = self.payloads_data.get("serv_nodes_resp") + have_rp6_resp = self.payloads_data.get("have_rp6_resp") + have_it_sn1_resp = self.payloads_data.get("have_it_sn1_resp") + have_it_sn2_resp = self.payloads_data.get("have_it_sn2_resp") + create_rp6_resp = self.payloads_data.get("create_rp6_resp") + att_rp6_status = self.payloads_data.get("attach_rp6_resp") + det_rp1_rp3_resp = self.payloads_data.get("detach_rp1_rp3_resp") + det_rp4_rp7_resp = self.payloads_data.get("detach_rp4_rp7_resp") + delete_rp1_resp = self.payloads_data.get("delete_rp1_resp") + delete_rp2_resp = self.payloads_data.get("delete_rp2_resp") + delete_rp3_resp = self.payloads_data.get("delete_rp3_resp") + delete_rp4_resp = self.payloads_data.get("delete_rp4_resp") + delete_rp5_resp = self.payloads_data.get("delete_rp5_resp") + delete_rp7_resp = self.payloads_data.get("delete_rp7_resp") + dd_rp1_status = self.payloads_data.get("del_deploy_rp1_resp") + dd_rp2_status = self.payloads_data.get("del_deploy_rp2_resp") + dd_rp3_status = self.payloads_data.get("del_deploy_rp3_resp") + dd_rp4_status = self.payloads_data.get("del_deploy_rp4_resp") + dd_rp5_status = 
self.payloads_data.get("del_deploy_rp5_resp") + dd_rp7_status = self.payloads_data.get("del_deploy_rp7_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + att_rp6_status = self.payloads_data.get("attach_rp6_resp") + + self.run_dcnm_send.side_effect = [ + have_rp6_resp, + serv_nodes_resp, + have_it_sn1_resp, + have_it_sn2_resp, + att_rp6_status, + create_rp6_resp, + att_rp6_status, + det_rp1_rp3_resp, + det_rp4_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + dd_rp1_status, + dd_rp2_status, + dd_rp3_status, + dd_rp4_status, + dd_rp5_status, + dd_rp7_status, + delete_rp1_resp, + delete_rp2_resp, + delete_rp3_resp, + delete_rp4_resp, + delete_rp5_resp, + delete_rp7_resp, + deploy_rp4_rp7_resp, + att_rp6_status, + ] + + if "test_dcnm_srp_override_with_no_config" == self._testMethodName: + + serv_nodes_resp = self.payloads_data.get("serv_nodes_resp") + have_it_sn1_resp = self.payloads_data.get("have_it_sn1_resp") + have_it_sn2_resp = self.payloads_data.get("have_it_sn2_resp") + det_rp1_rp3_resp = self.payloads_data.get("detach_rp1_rp3_resp") + det_rp4_rp7_resp = self.payloads_data.get("detach_rp4_rp7_resp") + delete_rp1_resp = self.payloads_data.get("delete_rp1_resp") + delete_rp2_resp = self.payloads_data.get("delete_rp2_resp") + delete_rp3_resp = self.payloads_data.get("delete_rp3_resp") + delete_rp4_resp = self.payloads_data.get("delete_rp4_resp") + delete_rp5_resp = self.payloads_data.get("delete_rp5_resp") + delete_rp6_resp = self.payloads_data.get("delete_rp6_resp") + delete_rp7_resp = self.payloads_data.get("delete_rp7_resp") + dd_rp1_status = self.payloads_data.get("del_deploy_rp1_resp") + dd_rp2_status = self.payloads_data.get("del_deploy_rp2_resp") + dd_rp3_status = self.payloads_data.get("del_deploy_rp3_resp") + dd_rp4_status = self.payloads_data.get("del_deploy_rp4_resp") + dd_rp5_status = self.payloads_data.get("del_deploy_rp5_resp") + dd_rp6_status = 
self.payloads_data.get("del_deploy_rp6_resp") + dd_rp7_status = self.payloads_data.get("del_deploy_rp7_resp") + deploy_rp1_rp3_resp = self.payloads_data.get("deploy_rp1_rp3_resp") + deploy_rp4_rp7_resp = self.payloads_data.get("deploy_rp4_rp7_resp") + + self.run_dcnm_send.side_effect = [ + serv_nodes_resp, + have_it_sn1_resp, + have_it_sn2_resp, + det_rp1_rp3_resp, + det_rp4_rp7_resp, + deploy_rp1_rp3_resp, + deploy_rp4_rp7_resp, + dd_rp1_status, + dd_rp2_status, + dd_rp3_status, + dd_rp4_status, + dd_rp5_status, + dd_rp6_status, + dd_rp7_status, + delete_rp1_resp, + delete_rp2_resp, + delete_rp3_resp, + delete_rp4_resp, + delete_rp5_resp, + delete_rp6_resp, + delete_rp7_resp, + ] + + if "test_dcnm_srp_query_non_existing" == self._testMethodName: + + self.run_dcnm_send.side_effect = [[], []] + + if "test_dcnm_srp_query_with_service_nodes" == self._testMethodName: + + have_it_sn1_resp = self.payloads_data.get("have_it_sn1_resp") + have_it_sn2_resp = self.payloads_data.get("have_it_sn2_resp") + + self.run_dcnm_send.side_effect = [have_it_sn1_resp, have_it_sn2_resp] + + if "test_dcnm_srp_query_with_peer_names" == self._testMethodName: + + have_rp1_resp = self.payloads_data.get("have_rp1_resp") + have_rp2_resp = self.payloads_data.get("have_rp2_resp") + have_rp3_resp = self.payloads_data.get("have_rp3_resp") + have_rp4_resp = self.payloads_data.get("have_rp4_resp") + have_rp5_resp = self.payloads_data.get("have_rp5_resp") + have_rp6_resp = self.payloads_data.get("have_rp6_resp") + have_rp7_resp = self.payloads_data.get("have_rp7_resp") + + self.run_dcnm_send.side_effect = [ + have_rp1_resp, + have_rp2_resp, + have_rp3_resp, + have_rp4_resp, + have_rp5_resp, + have_rp6_resp, + have_rp7_resp, + ] + + def load_fixtures(self, response=None, device=""): # Load srp related side-effects - self.load_srp_fixtures () + self.load_srp_fixtures() self.run_dcnm_version_supported.side_effect = [11] -#################################### FIXTURES END ############################ 
-#################################### TEST-CASES ############################## + # -------------------------- FIXTURES END -------------------------- + # -------------------------- TEST-CASES -------------------------- - def test_dcnm_srp_merged_new (self): + def test_dcnm_srp_merged_new(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('create_rp1_rp7_config') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_rp1_rp7_config") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 7) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 7) + self.assertEqual(len(result["diff"][0]["merged"]), 7) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 7) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_merged_new_no_opt_elems (self): + def test_dcnm_srp_merged_new_no_opt_elems(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - 
self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('create_rp1_rp7_config_no_opt_elems') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "create_rp1_rp7_config_no_opt_elems" + ) + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 7) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 7) + self.assertEqual(len(result["diff"][0]["merged"]), 7) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 7) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_merged_existing_no_opt_elems (self): + def test_dcnm_srp_merged_existing_no_opt_elems(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('create_rp1_rp7_config_no_opt_elems') - - set_module_args(dict(state='merged', - attach=True, - 
deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "create_rp1_rp7_config_no_opt_elems" + ) + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_merged_new_no_intra_fw_mand_elems (self): + def test_dcnm_srp_merged_new_no_intra_fw_mand_elems(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('create_no_intra_fw_mand_elems') + self.playbook_config = self.config_data.get("create_no_intra_fw_mand_elems") # From here we will remove one mandatory element from the config and check if that # is detected and errored out - ## No Deploy Mode object + # No Deploy Mode object cfg_no_dm = copy.deepcopy(self.playbook_config) cfg_no_dm[0].pop("deploy_mode") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - 
fabric='mmudigon', - service_fabric='external', - config=cfg_no_dm)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg_no_dm, + ) + ) result = None try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('deploy_mode - Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("deploy_mode - Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - ## No name object + # No name object cfg_no_dm = copy.deepcopy(self.playbook_config) cfg_no_dm[0].pop("name") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg_no_dm)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg_no_dm, + ) + ) result = None try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('name : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual(("name : Required parameter not found" in (str(e))), True) + self.assertEqual(result, None) - ## No next_hop object + # No next_hop object cfg_no_dm = copy.deepcopy(self.playbook_config) cfg_no_dm[0].pop("next_hop") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg_no_dm)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg_no_dm, + ) + ) result = None try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('next_hop : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("next_hop : Required parameter not found" in 
(str(e))), True + ) + self.assertEqual(result, None) - ## No node_name object + # No node_name object cfg_no_dm = copy.deepcopy(self.playbook_config) cfg_no_dm[0].pop("node_name") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg_no_dm)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg_no_dm, + ) + ) result = None try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('node_name : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("node_name : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) nets = ["inside_network", "outside_network"] for net in nets: - ## No Inside Name object + # No Inside Name object cfg_no_dm = copy.deepcopy(self.playbook_config) cfg_no_dm[0][net].pop("name") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg_no_dm)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg_no_dm, + ) + ) result = None try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('name : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("name : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - ## No Inside Profile IPV4GW object + # No Inside Profile IPV4GW object cfg_no_dm = copy.deepcopy(self.playbook_config) cfg_no_dm[0][net]["profile"].pop("ipv4_gw") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg_no_dm)) + set_module_args( + dict( + state="merged", + attach=True, + 
deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg_no_dm, + ) + ) result = None try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('ipv4_gw : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("ipv4_gw : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - ## No Inside vrf object + # No Inside vrf object cfg_no_dm = copy.deepcopy(self.playbook_config) cfg_no_dm[0][net].pop("vrf") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg_no_dm)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg_no_dm, + ) + ) result = None try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('vrf : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("vrf : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - def test_dcnm_srp_merged_new_no_inter_fw_mand_elems (self): + def test_dcnm_srp_merged_new_no_inter_fw_mand_elems(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('create_no_inter_fw_mand_elems') + self.playbook_config = self.config_data.get("create_no_inter_fw_mand_elems") # From here we will remove one mandatory element from the config and check if that # is detected and errored out @@ -1205,247 +1596,292 @@ def test_dcnm_srp_merged_new_no_inter_fw_mand_elems (self): for net in nets: - ## No Inside ipv4_lo object + # 
No Inside ipv4_lo object cfg_no_dm = copy.deepcopy(self.playbook_config) cfg_no_dm[0][net]["profile"].pop("ipv4_lo") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg_no_dm)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg_no_dm, + ) + ) result = None try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('ipv4_lo : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("ipv4_lo : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - ## No Inside ipv4_neighbor object + # No Inside ipv4_neighbor object cfg_no_dm = copy.deepcopy(self.playbook_config) cfg_no_dm[0][net]["profile"].pop("ipv4_neighbor") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg_no_dm)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg_no_dm, + ) + ) result = None try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('ipv4_neighbor : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("ipv4_neighbor : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - def test_dcnm_srp_merged_new_no_adc_mand_elems (self): + def test_dcnm_srp_merged_new_no_adc_mand_elems(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = 
self.config_data.get('create_no_adc_mand_elems') + self.playbook_config = self.config_data.get("create_no_adc_mand_elems") # From here we will remove one mandatory element from the config and check if that # is detected and errored out - ## No reverse_next_hop object + # No reverse_next_hop object cfg_no_dm = copy.deepcopy(self.playbook_config) cfg_no_dm[0].pop("reverse_next_hop") - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg_no_dm)) + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg_no_dm, + ) + ) result = None try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('reverse_next_hop : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) - + self.assertEqual( + ("reverse_next_hop : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - def test_dcnm_srp_merged_new_check_mode (self): + def test_dcnm_srp_merged_new_check_mode(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('create_rp1_rp7_config') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - _ansible_check_mode=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_rp1_rp7_config") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + _ansible_check_mode=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, 
failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 7) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 7) + self.assertEqual(len(result["diff"][0]["merged"]), 7) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 7) - def test_dcnm_srp_merged_new_invalid_request_error (self): + def test_dcnm_srp_merged_new_invalid_request_error(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('create_rp1_rp7_config') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_rp1_rp7_config") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 7) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 7) + self.assertEqual(len(result["diff"][0]["merged"]), 7) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + 
self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 7) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_merged_new_invalid_fabric_error (self): + def test_dcnm_srp_merged_new_invalid_fabric_error(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('create_rp1_rp7_config') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_rp1_rp7_config") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 7) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 7) + self.assertEqual(len(result["diff"][0]["merged"]), 7) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 7) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_merged_new_unauth_error (self): + def test_dcnm_srp_merged_new_unauth_error(self): # load the json from playbooks - self.config_data = 
loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('create_rp1_rp7_config') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_rp1_rp7_config") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 7) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 7) + self.assertEqual(len(result["diff"][0]["merged"]), 7) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 7) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_config_without_state (self): + def test_dcnm_srp_config_without_state(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('create_rp1_rp7_config') - - set_module_args(dict(attach=True, - deploy=True, - fabric='mmudigon', - 
service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_rp1_rp7_config") + + set_module_args( + dict( + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 7) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 7) + self.assertEqual(len(result["diff"][0]["merged"]), 7) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 7) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_merge_no_deploy (self): + def test_dcnm_srp_merge_no_deploy(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('create_rp1_rp7_config') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_rp1_rp7_config") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 7) - 
self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 7) + self.assertEqual(len(result["diff"][0]["merged"]), 7) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 7) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_merge_deploy_false (self): + def test_dcnm_srp_merge_deploy_false(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('create_rp1_rp7_config') - - set_module_args(dict(state='merged', - attach=True, - deploy=False, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_rp1_rp7_config") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=False, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 7) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 7) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + 
self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) # Validate create and deploy responses for resp in result["response"]: @@ -1454,618 +1890,718 @@ def test_dcnm_srp_merge_deploy_false (self): def test_dcnm_srp_wrong_state(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('create_rp1_rp7_config') - - set_module_args(dict(state='wrong_state', - attach=True, - deploy=False, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_rp1_rp7_config") + + set_module_args( + dict( + state="wrong_state", + attach=True, + deploy=False, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = None try: result = self.execute_module(changed=False, failed=False) - except: - self.assertEqual (result, None) + except Exception: + self.assertEqual(result, None) - def test_dcnm_srp_merged_existing (self): + def test_dcnm_srp_merged_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('create_rp1_rp7_config') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_rp1_rp7_config") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + 
fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_merged_existing_and_non_existing (self): + def test_dcnm_srp_merged_existing_and_non_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('create_rp1_rp7_config') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("create_rp1_rp7_config") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 4) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - 
self.assertEqual(len(result["diff"][0]["deploy"]) , 4) + self.assertEqual(len(result["diff"][0]["merged"]), 4) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 4) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_merged_update_existing (self): + def test_dcnm_srp_merged_update_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('update_rp2_rp4_config') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("update_rp2_rp4_config") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 2) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 2) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 2) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 2) # Validate create and deploy responses for resp in 
result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_merged_update_existing_unauth_err (self): + def test_dcnm_srp_merged_update_existing_unauth_err(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('update_rp2_rp4_config') - - set_module_args(dict(state='merged', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("update_rp2_rp4_config") + + set_module_args( + dict( + state="merged", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 2) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 2) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 2) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 2) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_delete_existing (self): + def test_dcnm_srp_delete_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data 
= loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('delete_rp1_rp7_config') - - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("delete_rp1_rp7_config") + + set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 7) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 7) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_delete_existing_no_config (self): + def test_dcnm_srp_delete_existing_no_config(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('delete_rp1_rp7_with_no_cfg') - - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("delete_rp1_rp7_with_no_cfg") + + 
set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 7) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 7) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_delete_existing_with_node_name (self): + def test_dcnm_srp_delete_existing_with_node_name(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('delete_rp1_rp7_with_node_name') - - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("delete_rp1_rp7_with_node_name") + + set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 7) - 
self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 7) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_delete_existing_unauth_err (self): + def test_dcnm_srp_delete_existing_unauth_err(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('delete_rp1_rp7_config') - - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("delete_rp1_rp7_config") + + set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 7) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 7) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + 
self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_delete_existing_and_non_existing (self): + def test_dcnm_srp_delete_existing_and_non_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('delete_rp1_rp7_config') - - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("delete_rp1_rp7_config") + + set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 4) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 4) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_delete_non_existing (self): + def test_dcnm_srp_delete_non_existing(self): # load the json from playbooks - self.config_data = 
loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('delete_rp1_rp7_config') - - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("delete_rp1_rp7_config") + + set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_delete_no_mand_elems (self): + def test_dcnm_srp_delete_no_mand_elems(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('delete_no_mand_elems') + self.playbook_config = self.config_data.get("delete_no_mand_elems") # From here we 
will remove one mandatory element from the config and check if that # is detected and errored out - ## No node_name object + # No node_name object cfg_no_dm = copy.deepcopy(self.playbook_config) cfg_no_dm[0].pop("node_name") - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg_no_dm)) + set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg_no_dm, + ) + ) result = None try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('node_name : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) + self.assertEqual( + ("node_name : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) - def test_dcnm_srp_replace_rp1_to_rp3_non_existing (self): + def test_dcnm_srp_replace_rp1_to_rp3_non_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('replace_rp1_to_rp3') - - set_module_args(dict(state='replaced', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("replace_rp1_to_rp3") + + set_module_args( + dict( + state="replaced", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 3) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - 
self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 3) + self.assertEqual(len(result["diff"][0]["merged"]), 3) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 3) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_replace_rp1_to_rp3_existing (self): + def test_dcnm_srp_replace_rp1_to_rp3_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('replace_rp1_to_rp3') - - set_module_args(dict(state='replaced', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("replace_rp1_to_rp3") + + set_module_args( + dict( + state="replaced", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 3) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 3) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 3) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 3) 
# Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_replace_rp1_to_rp3_existing_no_change (self): + def test_dcnm_srp_replace_rp1_to_rp3_existing_no_change(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('replace_rp1_to_rp3_no_change') - - set_module_args(dict(state='replaced', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("replace_rp1_to_rp3_no_change") + + set_module_args( + dict( + state="replaced", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_override_rp1_rp7_with_new_peerings (self): + def test_dcnm_srp_override_rp1_rp7_with_new_peerings(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = 
loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('override_with_new_peerings') - - set_module_args(dict(state='overridden', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("override_with_new_peerings") + + set_module_args( + dict( + state="overridden", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 2) - self.assertEqual(len(result["diff"][0]["deleted"]) , 7) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 2) + self.assertEqual(len(result["diff"][0]["merged"]), 2) + self.assertEqual(len(result["diff"][0]["deleted"]), 7) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 2) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_override_with_existing_peering (self): + def test_dcnm_srp_override_with_existing_peering(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('override_with_existing_peering') - - set_module_args(dict(state='overridden', - attach=True, - deploy=True, - fabric='mmudigon', - 
service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("override_with_existing_peering") + + set_module_args( + dict( + state="overridden", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 6) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 6) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - - def test_dcnm_srp_override_with_existing_peering_updated (self): + def test_dcnm_srp_override_with_existing_peering_updated(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('override_with_existing_peering_updated') - - set_module_args(dict(state='overridden', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "override_with_existing_peering_updated" + ) + + set_module_args( + dict( + state="overridden", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = 
self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 6) - self.assertEqual(len(result["diff"][0]["modified"]) , 1) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 1) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 6) + self.assertEqual(len(result["diff"][0]["modified"]), 1) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 1) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_override_with_no_config (self): + def test_dcnm_srp_override_with_no_config(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('override_with_no_config') - - set_module_args(dict(state='overridden', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("override_with_no_config") + + set_module_args( + dict( + state="overridden", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 7) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 0) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + 
self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 7) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) # Validate create and deploy responses for resp in result["response"]: self.assertEqual(resp["RETURN_CODE"], 200) - def test_dcnm_srp_query_non_existing (self): + def test_dcnm_srp_query_non_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('config_query_non_exist') - - set_module_args(dict(state='query', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("config_query_non_exist") + + set_module_args( + dict( + state="query", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 2) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 2) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - def test_dcnm_srp_query_with_service_nodes (self): + def test_dcnm_srp_query_with_service_nodes(self): # load the json from 
playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('config_query_with_node') - - set_module_args(dict(state='query', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("config_query_with_node") + + set_module_args( + dict( + state="query", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 2) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 2) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - self.assertEqual(len(result["response"]) , 7) + self.assertEqual(len(result["response"]), 7) - def test_dcnm_srp_query_with_peer_names (self): + def test_dcnm_srp_query_with_peer_names(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('config_query_with_peername') - - set_module_args(dict(state='query', - attach=True, - 
deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("config_query_with_peername") + + set_module_args( + dict( + state="query", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result["diff"][0]["merged"]) , 0) - self.assertEqual(len(result["diff"][0]["deleted"]) , 0) - self.assertEqual(len(result["diff"][0]["modified"]) , 0) - self.assertEqual(len(result["diff"][0]["query"]) , 7) - self.assertEqual(len(result["diff"][0]["deploy"]) , 0) + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["modified"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 7) + self.assertEqual(len(result["diff"][0]["deploy"]), 0) - self.assertEqual(len(result["response"]) , 7) + self.assertEqual(len(result["response"]), 7) - def test_dcnm_srp_query_no_mand_elems (self): + def test_dcnm_srp_query_no_mand_elems(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_srp_configs') - self.payloads_data = loadPlaybookData('dcnm_srp_payloads') + self.config_data = loadPlaybookData("dcnm_srp_configs") + self.payloads_data = loadPlaybookData("dcnm_srp_payloads") # load required config data - self.playbook_config = self.config_data.get('query_no_mand_elems') - - set_module_args(dict(state='query', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=self.playbook_config)) + self.playbook_config = self.config_data.get("query_no_mand_elems") + + set_module_args( + dict( + state="query", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) # From here we will remove one 
mandatory element from the config and check if that # is detected and errored out - ## No node_name object + # No node_name object cfg_no_dm = copy.deepcopy(self.playbook_config) cfg_no_dm[0].pop("node_name") - set_module_args(dict(state='deleted', - attach=True, - deploy=True, - fabric='mmudigon', - service_fabric='external', - config=cfg_no_dm)) + set_module_args( + dict( + state="deleted", + attach=True, + deploy=True, + fabric="mmudigon", + service_fabric="external", + config=cfg_no_dm, + ) + ) result = None try: result = self.execute_module(changed=True, failed=False) except Exception as e: - self.assertEqual(('node_name : Required parameter not found' in (str(e))), True) - self.assertEqual (result, None) - + self.assertEqual( + ("node_name : Required parameter not found" in (str(e))), True + ) + self.assertEqual(result, None) diff --git a/tests/unit/modules/dcnm/test_dcnm_template.py b/tests/unit/modules/dcnm/test_dcnm_template.py index b782e776a..d98e42aaa 100644 --- a/tests/unit/modules/dcnm/test_dcnm_template.py +++ b/tests/unit/modules/dcnm/test_dcnm_template.py @@ -1,6 +1,4 @@ -#!/usr/bin/python -# -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,7 +13,8 @@ # limitations under the License. 
# Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) +from __future__ import absolute_import, division, print_function + __metaclass__ = type from ansible_collections.ansible.netcommon.tests.unit.compat.mock import patch @@ -27,9 +26,11 @@ AnsibleFailJson, ) -import json, copy +import json +import copy import pytest + class TestDcnmTemplateModule(TestDcnmModule): module = dcnm_template @@ -39,22 +40,26 @@ class TestDcnmTemplateModule(TestDcnmModule): def init_data(self): pass - def log_msg (self, msg): + def log_msg(self, msg): if fd is None: fd = open("template-ut.log", "w") - self.fd.write (msg) + self.fd.write(msg) self.fd.flush() def setUp(self): super(TestDcnmTemplateModule, self).setUp() - self.mock_dcnm_version_supported = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_template.dcnm_version_supported') + self.mock_dcnm_version_supported = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_template.dcnm_version_supported" + ) self.run_dcnm_version_supported = self.mock_dcnm_version_supported.start() - self.mock_dcnm_send = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_template.dcnm_send') - self.run_dcnm_send = self.mock_dcnm_send.start() + self.mock_dcnm_send = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_template.dcnm_send" + ) + self.run_dcnm_send = self.mock_dcnm_send.start() def tearDown(self): @@ -62,612 +67,709 @@ def tearDown(self): self.mock_dcnm_send.stop() self.mock_dcnm_version_supported.stop() -#################################### FIXTURES ############################ + # -------------------------- FIXTURES -------------------------- - def load_template_fixtures (self): + def load_template_fixtures(self): - if ('_template_merged_new' in self._testMethodName): + if "_template_merged_new" in self._testMethodName: # No templates exists - template1 = [] - template2 = [] - template3 = [] - template4 = [] + template1 = [] + template2 = [] + template3 = [] 
+ template4 = [] self.run_dcnm_send.side_effect = [ - template1, template2, template3, template4, - self.validate_resp, self.validate_resp, - self.validate_resp, self.validate_resp, - self.create_succ_resp, self.create_succ_resp, - self.create_succ_resp, self.create_succ_resp] - - if ('_template_merged_new_check_mode' in self._testMethodName): + template1, + template2, + template3, + template4, + self.validate_resp, + self.validate_resp, + self.validate_resp, + self.validate_resp, + self.create_succ_resp, + self.create_succ_resp, + self.create_succ_resp, + self.create_succ_resp, + ] + + if "_template_merged_new_check_mode" in self._testMethodName: # No templates exists - template1 = [] - template2 = [] - template3 = [] - template4 = [] + template1 = [] + template2 = [] + template3 = [] + template4 = [] self.run_dcnm_send.side_effect = [ - template1, template2, template3, template4, - self.validate_resp, self.validate_resp, - self.validate_resp, self.validate_resp - ] + template1, + template2, + template3, + template4, + self.validate_resp, + self.validate_resp, + self.validate_resp, + self.validate_resp, + ] - if ('_template_merged_in_use' in self._testMethodName): + if "_template_merged_in_use" in self._testMethodName: - inuse_template1 = self.payloads_data.get('template_110_inuse_have_resp') - inuse_template2 = self.payloads_data.get('template_111_inuse_have_resp') + inuse_template1 = self.payloads_data.get("template_110_inuse_have_resp") + inuse_template2 = self.payloads_data.get("template_111_inuse_have_resp") # templates exists and is also in use self.run_dcnm_send.side_effect = [ - inuse_template1, inuse_template2, - self.validate_resp, self.validate_resp, - self.create_inuse_resp, self.create_inuse_resp] + inuse_template1, + inuse_template2, + self.validate_resp, + self.validate_resp, + self.create_inuse_resp, + self.create_inuse_resp, + ] - if ('_template_merged_existing' in self._testMethodName): + if "_template_merged_existing" in self._testMethodName: # 
Templates exist - template1 = self.payloads_data.get('template_101_have_resp') - template2 = self.payloads_data.get('template_102_have_resp') - template3 = self.payloads_data.get('template_103_have_resp') - template4 = self.payloads_data.get('template_104_have_resp') + template1 = self.payloads_data.get("template_101_have_resp") + template2 = self.payloads_data.get("template_102_have_resp") + template3 = self.payloads_data.get("template_103_have_resp") + template4 = self.payloads_data.get("template_104_have_resp") self.run_dcnm_send.side_effect = [ - template1, template2, template3, template4, - self.validate_resp, self.validate_resp, - self.validate_resp, self.validate_resp, - self.create_succ_resp, self.create_succ_resp, - self.create_succ_resp, self.create_succ_resp] - - if ('_template_delete_existing' in self._testMethodName): + template1, + template2, + template3, + template4, + self.validate_resp, + self.validate_resp, + self.validate_resp, + self.validate_resp, + self.create_succ_resp, + self.create_succ_resp, + self.create_succ_resp, + self.create_succ_resp, + ] + + if "_template_delete_existing" in self._testMethodName: # Templates exist - template1 = self.payloads_data.get('template_101_have_resp') - template2 = self.payloads_data.get('template_102_have_resp') - template3 = self.payloads_data.get('template_103_have_resp') - template4 = self.payloads_data.get('template_104_have_resp') + template1 = self.payloads_data.get("template_101_have_resp") + template2 = self.payloads_data.get("template_102_have_resp") + template3 = self.payloads_data.get("template_103_have_resp") + template4 = self.payloads_data.get("template_104_have_resp") self.run_dcnm_send.side_effect = [ - template1, template2, template3, template4, - self.delete_succ_resp] + template1, + template2, + template3, + template4, + self.delete_succ_resp, + ] - if ('_template_delete_inuse' in self._testMethodName): + if "_template_delete_inuse" in self._testMethodName: # Templates exist - template1 = 
self.payloads_data.get('template_101_have_resp') - template2 = self.payloads_data.get('template_110_inuse_have_resp') - template3 = self.payloads_data.get('template_111_inuse_have_resp') - switches = self.payloads_data.get('template_switches') - policies = self.payloads_data.get('template_policies') + template1 = self.payloads_data.get("template_101_have_resp") + template2 = self.payloads_data.get("template_110_inuse_have_resp") + template3 = self.payloads_data.get("template_111_inuse_have_resp") + switches = self.payloads_data.get("template_switches") + policies = self.payloads_data.get("template_policies") self.run_dcnm_send.side_effect = [ - template1, template2, template3, - self.delete_inuse_resp, switches, policies] + template1, + template2, + template3, + self.delete_inuse_resp, + switches, + policies, + ] - if ('_template_delete_inuse_only' in self._testMethodName): + if "_template_delete_inuse_only" in self._testMethodName: # Templates exist - template1 = self.payloads_data.get('template_110_inuse_have_resp') - template2 = self.payloads_data.get('template_111_inuse_have_resp') - switches = self.payloads_data.get('template_switches') - policies = self.payloads_data.get('template_policies') + template1 = self.payloads_data.get("template_110_inuse_have_resp") + template2 = self.payloads_data.get("template_111_inuse_have_resp") + switches = self.payloads_data.get("template_switches") + policies = self.payloads_data.get("template_policies") self.run_dcnm_send.side_effect = [ - template1, template2, - self.delete_inuse_resp, switches, policies] + template1, + template2, + self.delete_inuse_resp, + switches, + policies, + ] - if ('_template_delete_non_existing' in self._testMethodName): + if "_template_delete_non_existing" in self._testMethodName: # Templates exist - template1 = [] - template2 = [] - template3 = [] - template4 = [] + template1 = [] + template2 = [] + template3 = [] + template4 = [] self.run_dcnm_send.side_effect = [ - template1, template2, 
template3, template4, - self.delete_non_exist_resp] + template1, + template2, + template3, + template4, + self.delete_non_exist_resp, + ] - if ('_template_replace_existing' in self._testMethodName): + if "_template_replace_existing" in self._testMethodName: # Templates exist - template1 = self.payloads_data.get('template_101_have_resp') + template1 = self.payloads_data.get("template_101_have_resp") self.run_dcnm_send.side_effect = [ - template1, - self.validate_resp, - self.create_succ_resp] + template1, + self.validate_resp, + self.create_succ_resp, + ] - if ('_template_replace_no_description' in self._testMethodName): + if "_template_replace_no_description" in self._testMethodName: # Templates exist - template1 = self.payloads_data.get('template_101_have_resp') + template1 = self.payloads_data.get("template_101_have_resp") self.run_dcnm_send.side_effect = [ - template1, - self.validate_resp, - self.create_succ_resp] + template1, + self.validate_resp, + self.create_succ_resp, + ] - if ('_template_replace_no_tags' in self._testMethodName): + if "_template_replace_no_tags" in self._testMethodName: # Templates exist - template1 = self.payloads_data.get('template_101_have_resp') + template1 = self.payloads_data.get("template_101_have_resp") self.run_dcnm_send.side_effect = [ - template1, - self.validate_resp, - self.create_succ_resp] + template1, + self.validate_resp, + self.create_succ_resp, + ] - if ('_template_replace_one_existing' in self._testMethodName): + if "_template_replace_one_existing" in self._testMethodName: # Templates exist - template1 = self.payloads_data.get('template_101_have_resp') - template2 = self.payloads_data.get('template_102_have_resp') - template3 = self.payloads_data.get('template_103_have_resp') - template4 = self.payloads_data.get('template_104_have_resp') + template1 = self.payloads_data.get("template_101_have_resp") + template2 = self.payloads_data.get("template_102_have_resp") + template3 = 
self.payloads_data.get("template_103_have_resp") + template4 = self.payloads_data.get("template_104_have_resp") self.run_dcnm_send.side_effect = [ - template1, template2, template3, template4, - self.validate_resp, self.validate_resp, - self.validate_resp, self.validate_resp, - self.create_succ_resp, self.create_succ_resp, - self.create_succ_resp, self.create_succ_resp] - - if ('_template_query_existing' in self._testMethodName): + template1, + template2, + template3, + template4, + self.validate_resp, + self.validate_resp, + self.validate_resp, + self.validate_resp, + self.create_succ_resp, + self.create_succ_resp, + self.create_succ_resp, + self.create_succ_resp, + ] + + if "_template_query_existing" in self._testMethodName: # Templates exist - template1 = self.payloads_data.get('template_101_query_resp') - template2 = self.payloads_data.get('template_102_query_resp') - template3 = self.payloads_data.get('template_103_query_resp') - template4 = self.payloads_data.get('template_104_query_resp') - switches = self.payloads_data.get('template_switches') - policies = [] + template1 = self.payloads_data.get("template_101_query_resp") + template2 = self.payloads_data.get("template_102_query_resp") + template3 = self.payloads_data.get("template_103_query_resp") + template4 = self.payloads_data.get("template_104_query_resp") + switches = self.payloads_data.get("template_switches") + policies = [] self.run_dcnm_send.side_effect = [ - template1, template2, template3, template4, - switches, policies - ] + template1, + template2, + template3, + template4, + switches, + policies, + ] - if ('_template_query_existing_inuse' in self._testMethodName): + if "_template_query_existing_inuse" in self._testMethodName: # Templates exist and in use - template1 = self.payloads_data.get('template_110_inuse_query_resp') - template2 = self.payloads_data.get('template_111_inuse_query_resp') - switches = self.payloads_data.get('template_switches') - policies = 
self.payloads_data.get('template_policies') + template1 = self.payloads_data.get("template_110_inuse_query_resp") + template2 = self.payloads_data.get("template_111_inuse_query_resp") + switches = self.payloads_data.get("template_switches") + policies = self.payloads_data.get("template_policies") - self.run_dcnm_send.side_effect = [ - template1, template2, switches, policies - ] + self.run_dcnm_send.side_effect = [template1, template2, switches, policies] - if ('_template_validation_fail' in self._testMethodName): + if "_template_validation_fail" in self._testMethodName: template1 = [] - self.run_dcnm_send.side_effect = [ - template1, self.validate_fail_resp - ] + self.run_dcnm_send.side_effect = [template1, self.validate_fail_resp] - def load_fixtures(self, response=None, device=''): + def load_fixtures(self, response=None, device=""): self.run_dcnm_version_supported.side_effect = [11] # Load template related side-effects - self.load_template_fixtures () + self.load_template_fixtures() -#################################### TEST-CASES ############################ + # -------------------------- TEST-CASES -------------------------- def test_dcnm_template_wrong_state(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_template_configs') + self.config_data = loadPlaybookData("dcnm_template_configs") # load required config data - self.playbook_config = self.config_data.get('template_merge_new_config') + self.playbook_config = self.config_data.get("template_merge_new_config") - set_module_args(dict(state='replaced', - config=self.playbook_config)) + set_module_args(dict(state="replaced", config=self.playbook_config)) result = None try: result = self.execute_module(changed=False, failed=False) - except: - self.assertEqual (result, None) + except Exception: + self.assertEqual(result, None) def test_dcnm_template_merged_new(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_template_configs') - self.payloads_data 
= loadPlaybookData('dcnm_template_payloads') + self.config_data = loadPlaybookData("dcnm_template_configs") + self.payloads_data = loadPlaybookData("dcnm_template_payloads") # load required config data - self.playbook_config = self.config_data.get('template_merge_new_config') - self.validate_resp = self.payloads_data.get('template_validate_resp') - self.create_succ_resp = self.payloads_data.get('template_create_succ_resp') + self.playbook_config = self.config_data.get("template_merge_new_config") + self.validate_resp = self.payloads_data.get("template_validate_resp") + self.create_succ_resp = self.payloads_data.get("template_create_succ_resp") - set_module_args(dict(state='merged', - config=self.playbook_config)) + set_module_args(dict(state="merged", config=self.playbook_config)) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 4) - for d in result['diff'][0]['merged']: - self.assertEqual ((d['template_name'] in ['template_101', 'template_102', - 'template_103', 'template_104']), True) + self.assertEqual(len(result["diff"][0]["merged"]), 4) + for d in result["diff"][0]["merged"]: + self.assertEqual( + ( + d["template_name"] + in ["template_101", "template_102", "template_103", "template_104"] + ), + True, + ) - for r in result['response']: - self.assertEqual (('Template Created' in r['DATA']['status']), True) + for r in result["response"]: + self.assertEqual(("Template Created" in r["DATA"]["status"]), True) def test_dcnm_template_merged_new_check_mode(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_template_configs') - self.payloads_data = loadPlaybookData('dcnm_template_payloads') + self.config_data = loadPlaybookData("dcnm_template_configs") + self.payloads_data = loadPlaybookData("dcnm_template_payloads") # load required config data - self.playbook_config = self.config_data.get('template_merge_new_config') - self.validate_resp = 
self.payloads_data.get('template_validate_resp') - self.create_succ_resp = self.payloads_data.get('template_create_succ_resp') + self.playbook_config = self.config_data.get("template_merge_new_config") + self.validate_resp = self.payloads_data.get("template_validate_resp") + self.create_succ_resp = self.payloads_data.get("template_create_succ_resp") - set_module_args(dict(state='merged', - config=self.playbook_config, - _ansible_check_mode=True)) + set_module_args( + dict(state="merged", config=self.playbook_config, _ansible_check_mode=True) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 4) - self.assertEqual(len(result['response']), 0) - for d in result['diff'][0]['merged']: - self.assertEqual ((d['template_name'] in ['template_101', 'template_102', - 'template_103', 'template_104']), True) + self.assertEqual(len(result["diff"][0]["merged"]), 4) + self.assertEqual(len(result["response"]), 0) + for d in result["diff"][0]["merged"]: + self.assertEqual( + ( + d["template_name"] + in ["template_101", "template_102", "template_103", "template_104"] + ), + True, + ) def test_dcnm_template_merged_in_use(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_template_configs') - self.payloads_data = loadPlaybookData('dcnm_template_payloads') + self.config_data = loadPlaybookData("dcnm_template_configs") + self.payloads_data = loadPlaybookData("dcnm_template_payloads") # load required config data - self.playbook_config = self.config_data.get('template_merge_inuse_config') - self.validate_resp = self.payloads_data.get('template_validate_resp') - self.create_inuse_resp = self.payloads_data.get('template_create_in_use_resp') + self.playbook_config = self.config_data.get("template_merge_inuse_config") + self.validate_resp = self.payloads_data.get("template_validate_resp") + self.create_inuse_resp = self.payloads_data.get("template_create_in_use_resp") - 
set_module_args(dict(state='merged', - config=self.playbook_config)) + set_module_args(dict(state="merged", config=self.playbook_config)) with pytest.raises(AnsibleFailJson) as failure_msg: self.execute_module(changed=False, failed=False) - fail_data = failure_msg.value.args[0]['msg'] + fail_data = failure_msg.value.args[0]["msg"] print(fail_data) - self.assertEqual(fail_data['RETURN_CODE'], 500) - self.assertEqual(fail_data['MESSAGE'], 'Internal Server Error') - self.assertRegex(fail_data['DATA'], 'Template is already in use.Cannot be overwritten') + self.assertEqual(fail_data["RETURN_CODE"], 500) + self.assertEqual(fail_data["MESSAGE"], "Internal Server Error") + self.assertRegex( + fail_data["DATA"], "Template is already in use.Cannot be overwritten" + ) def test_dcnm_template_merged_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_template_configs') - self.payloads_data = loadPlaybookData('dcnm_template_payloads') + self.config_data = loadPlaybookData("dcnm_template_configs") + self.payloads_data = loadPlaybookData("dcnm_template_payloads") # load required config data - self.playbook_config = self.config_data.get('template_merge_existing_config') - self.validate_resp = self.payloads_data.get('template_validate_resp') - self.create_succ_resp = self.payloads_data.get('template_create_succ_resp') + self.playbook_config = self.config_data.get("template_merge_existing_config") + self.validate_resp = self.payloads_data.get("template_validate_resp") + self.create_succ_resp = self.payloads_data.get("template_create_succ_resp") - set_module_args(dict(state='merged', - config=self.playbook_config)) + set_module_args(dict(state="merged", config=self.playbook_config)) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 4) - for d in result['diff'][0]['merged']: - self.assertEqual ((d['template_name'] in ['template_101', 'template_102', - 'template_103', 'template_104']), 
True) - for r in result['response']: - self.assertEqual (('Template Created' in r['DATA']['status']), True) + self.assertEqual(len(result["diff"][0]["merged"]), 4) + for d in result["diff"][0]["merged"]: + self.assertEqual( + ( + d["template_name"] + in ["template_101", "template_102", "template_103", "template_104"] + ), + True, + ) + for r in result["response"]: + self.assertEqual(("Template Created" in r["DATA"]["status"]), True) def test_dcnm_template_delete_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_template_configs') - self.payloads_data = loadPlaybookData('dcnm_template_payloads') + self.config_data = loadPlaybookData("dcnm_template_configs") + self.payloads_data = loadPlaybookData("dcnm_template_payloads") # load required config data - self.playbook_config = self.config_data.get('template_delete_existing_config') - self.delete_succ_resp = self.payloads_data.get('template_delete_succ_resp') + self.playbook_config = self.config_data.get("template_delete_existing_config") + self.delete_succ_resp = self.payloads_data.get("template_delete_succ_resp") - set_module_args(dict(state='deleted', - config=self.playbook_config)) + set_module_args(dict(state="deleted", config=self.playbook_config)) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['deleted']), 4) - for r in result['response']: - self.assertEqual (('Template deletion successful' in r['DATA']), True) + self.assertEqual(len(result["diff"][0]["deleted"]), 4) + for r in result["response"]: + self.assertEqual(("Template deletion successful" in r["DATA"]), True) def test_dcnm_template_delete_inuse(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_template_configs') - self.payloads_data = loadPlaybookData('dcnm_template_payloads') + self.config_data = loadPlaybookData("dcnm_template_configs") + self.payloads_data = loadPlaybookData("dcnm_template_payloads") # load required config data - 
self.playbook_config = self.config_data.get('template_delete_inuse_config') - self.delete_inuse_resp = self.payloads_data.get('template_delete_inuse_resp') + self.playbook_config = self.config_data.get("template_delete_inuse_config") + self.delete_inuse_resp = self.payloads_data.get("template_delete_inuse_resp") - set_module_args(dict(state='deleted', - config=self.playbook_config)) + set_module_args(dict(state="deleted", config=self.playbook_config)) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result['diff'][0]['deleted']), 3) - for r in result['response']: - self.assertEqual (('Templates in use, not deleted' in r['DATA']), True) - self.assertEqual (('template_101' not in r['DATA']), True) - self.assertEqual (('template_110' in r['DATA']), True) - self.assertEqual (('template_111' in r['DATA']), True) + self.assertEqual(len(result["diff"][0]["deleted"]), 3) + for r in result["response"]: + self.assertEqual(("Templates in use, not deleted" in r["DATA"]), True) + self.assertEqual(("template_101" not in r["DATA"]), True) + self.assertEqual(("template_110" in r["DATA"]), True) + self.assertEqual(("template_111" in r["DATA"]), True) def test_dcnm_template_delete_inuse_only(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_template_configs') - self.payloads_data = loadPlaybookData('dcnm_template_payloads') + self.config_data = loadPlaybookData("dcnm_template_configs") + self.payloads_data = loadPlaybookData("dcnm_template_payloads") # load required config data - self.playbook_config = self.config_data.get('template_delete_inuse_only_config') - self.delete_inuse_resp = self.payloads_data.get('template_delete_inuse_resp') + self.playbook_config = self.config_data.get("template_delete_inuse_only_config") + self.delete_inuse_resp = self.payloads_data.get("template_delete_inuse_resp") - set_module_args(dict(state='deleted', - config=self.playbook_config)) + set_module_args(dict(state="deleted", 
config=self.playbook_config)) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result['diff'][0]['deleted']), 2) - for r in result['response']: - self.assertEqual (('Templates in use, not deleted' in r['DATA']), True) - self.assertEqual (('template_110' in r['DATA']), True) - self.assertEqual (('template_111' in r['DATA']), True) + self.assertEqual(len(result["diff"][0]["deleted"]), 2) + for r in result["response"]: + self.assertEqual(("Templates in use, not deleted" in r["DATA"]), True) + self.assertEqual(("template_110" in r["DATA"]), True) + self.assertEqual(("template_111" in r["DATA"]), True) def test_dcnm_template_delete_non_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_template_configs') - self.payloads_data = loadPlaybookData('dcnm_template_payloads') + self.config_data = loadPlaybookData("dcnm_template_configs") + self.payloads_data = loadPlaybookData("dcnm_template_payloads") # load required config data - self.playbook_config = self.config_data.get('template_delete_non_existing_config') - self.delete_non_exist_resp = self.payloads_data.get('template_delete_no_exist_resp') - - set_module_args(dict(state='deleted', - config=self.playbook_config)) + self.playbook_config = self.config_data.get( + "template_delete_non_existing_config" + ) + self.delete_non_exist_resp = self.payloads_data.get( + "template_delete_no_exist_resp" + ) + + set_module_args(dict(state="deleted", config=self.playbook_config)) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result['diff'][0]['deleted']), 0) - for r in result['response']: - self.assertEqual ((result['response'] == []), True) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + for r in result["response"]: + self.assertEqual((result["response"] == []), True) def test_dcnm_template_replace_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_template_configs') - 
self.payloads_data = loadPlaybookData('dcnm_template_payloads') + self.config_data = loadPlaybookData("dcnm_template_configs") + self.payloads_data = loadPlaybookData("dcnm_template_payloads") # load required config data - self.playbook_config = self.config_data.get('template_replace_config') - self.validate_resp = self.payloads_data.get('template_validate_resp') - self.create_succ_resp = self.payloads_data.get('template_create_succ_resp') + self.playbook_config = self.config_data.get("template_replace_config") + self.validate_resp = self.payloads_data.get("template_validate_resp") + self.create_succ_resp = self.payloads_data.get("template_create_succ_resp") - set_module_args(dict(state='merged', - config=self.playbook_config)) + set_module_args(dict(state="merged", config=self.playbook_config)) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 1) - for d in result['diff'][0]['merged']: - self.assertEqual ((d['template_name'] in ['template_101']), True) - self.assertEqual (('Template_101 being replaced' in d['content']), True) - self.assertEqual (('internal policy 101 after replacement' in d['content']), True) - for r in result['response']: - self.assertEqual (('Template Created' in r['DATA']['status']), True) + self.assertEqual(len(result["diff"][0]["merged"]), 1) + for d in result["diff"][0]["merged"]: + self.assertEqual((d["template_name"] in ["template_101"]), True) + self.assertEqual(("Template_101 being replaced" in d["content"]), True) + self.assertEqual( + ("internal policy 101 after replacement" in d["content"]), True + ) + for r in result["response"]: + self.assertEqual(("Template Created" in r["DATA"]["status"]), True) def test_dcnm_template_replace_one_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_template_configs') - self.payloads_data = loadPlaybookData('dcnm_template_payloads') + self.config_data = loadPlaybookData("dcnm_template_configs") + 
self.payloads_data = loadPlaybookData("dcnm_template_payloads") # load required config data - self.playbook_config = self.config_data.get('template_replace_one_config') - self.validate_resp = self.payloads_data.get('template_validate_resp') - self.create_succ_resp = self.payloads_data.get('template_create_succ_resp') + self.playbook_config = self.config_data.get("template_replace_one_config") + self.validate_resp = self.payloads_data.get("template_validate_resp") + self.create_succ_resp = self.payloads_data.get("template_create_succ_resp") - set_module_args(dict(state='merged', - config=self.playbook_config)) + set_module_args(dict(state="merged", config=self.playbook_config)) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 1) - for d in result['diff'][0]['merged']: - self.assertEqual ((d['template_name'] in ['template_101']), True) - for r in result['response']: - self.assertEqual (('Template Created' in r['DATA']['status']), True) + self.assertEqual(len(result["diff"][0]["merged"]), 1) + for d in result["diff"][0]["merged"]: + self.assertEqual((d["template_name"] in ["template_101"]), True) + for r in result["response"]: + self.assertEqual(("Template Created" in r["DATA"]["status"]), True) - r = result['diff'][0]['merged'] + r = result["diff"][0]["merged"] - self.assertEqual (('Template_101 being replaced' in r[0]['content']), True) - self.assertEqual (('internal policy 101 after replacement' in r[0]['content']), True) - self.assertEqual (('destination-group 1001' in r[0]['content']), True) - self.assertEqual (('port 51001' in r[0]['content']), True) - self.assertEqual (('dst-grp 1001' in r[0]['content']), True) - self.assertEqual (('snsr-grp 1001' in r[0]['content']), True) + self.assertEqual(("Template_101 being replaced" in r[0]["content"]), True) + self.assertEqual( + ("internal policy 101 after replacement" in r[0]["content"]), True + ) + self.assertEqual(("destination-group 1001" in 
r[0]["content"]), True) + self.assertEqual(("port 51001" in r[0]["content"]), True) + self.assertEqual(("dst-grp 1001" in r[0]["content"]), True) + self.assertEqual(("snsr-grp 1001" in r[0]["content"]), True) def test_dcnm_template_replace_no_description(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_template_configs') - self.payloads_data = loadPlaybookData('dcnm_template_payloads') + self.config_data = loadPlaybookData("dcnm_template_configs") + self.payloads_data = loadPlaybookData("dcnm_template_payloads") # load required config data - self.playbook_config = self.config_data.get('template_replace_no_description') - self.validate_resp = self.payloads_data.get('template_validate_resp') - self.create_succ_resp = self.payloads_data.get('template_create_succ_resp') + self.playbook_config = self.config_data.get("template_replace_no_description") + self.validate_resp = self.payloads_data.get("template_validate_resp") + self.create_succ_resp = self.payloads_data.get("template_create_succ_resp") - set_module_args(dict(state='merged', - config=self.playbook_config)) + set_module_args(dict(state="merged", config=self.playbook_config)) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 1) - for d in result['diff'][0]['merged']: - self.assertEqual ((d['template_name'] in ['template_101']), True) - for r in result['response']: - self.assertEqual (('Template Created' in r['DATA']['status']), True) + self.assertEqual(len(result["diff"][0]["merged"]), 1) + for d in result["diff"][0]["merged"]: + self.assertEqual((d["template_name"] in ["template_101"]), True) + for r in result["response"]: + self.assertEqual(("Template Created" in r["DATA"]["status"]), True) - r = result['diff'][0]['merged'] + r = result["diff"][0]["merged"] - self.assertEqual (('Template_101' in r[0]['content']), True) - self.assertEqual (('internal policy 101 being replaced' in r[0]['content']), True) + 
self.assertEqual(("Template_101" in r[0]["content"]), True) + self.assertEqual( + ("internal policy 101 being replaced" in r[0]["content"]), True + ) def test_dcnm_template_replace_no_tags(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_template_configs') - self.payloads_data = loadPlaybookData('dcnm_template_payloads') + self.config_data = loadPlaybookData("dcnm_template_configs") + self.payloads_data = loadPlaybookData("dcnm_template_payloads") # load required config data - self.playbook_config = self.config_data.get('template_replace_no_tags') - self.validate_resp = self.payloads_data.get('template_validate_resp') - self.create_succ_resp = self.payloads_data.get('template_create_succ_resp') + self.playbook_config = self.config_data.get("template_replace_no_tags") + self.validate_resp = self.payloads_data.get("template_validate_resp") + self.create_succ_resp = self.payloads_data.get("template_create_succ_resp") - set_module_args(dict(state='merged', - config=self.playbook_config)) + set_module_args(dict(state="merged", config=self.playbook_config)) result = self.execute_module(changed=True, failed=False) - self.assertEqual(len(result['diff'][0]['merged']), 1) - for d in result['diff'][0]['merged']: - self.assertEqual ((d['template_name'] in ['template_101']), True) - for r in result['response']: - self.assertEqual (('Template Created' in r['DATA']['status']), True) + self.assertEqual(len(result["diff"][0]["merged"]), 1) + for d in result["diff"][0]["merged"]: + self.assertEqual((d["template_name"] in ["template_101"]), True) + for r in result["response"]: + self.assertEqual(("Template Created" in r["DATA"]["status"]), True) - r = result['diff'][0]['merged'] + r = result["diff"][0]["merged"] - self.assertEqual (('internal policy 101' in r[0]['content']), True) - self.assertEqual (('Template_101 being replaced' in r[0]['content']), True) + self.assertEqual(("internal policy 101" in r[0]["content"]), True) + 
self.assertEqual(("Template_101 being replaced" in r[0]["content"]), True) def test_dcnm_template_query_existing(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_template_configs') - self.payloads_data = loadPlaybookData('dcnm_template_payloads') + self.config_data = loadPlaybookData("dcnm_template_configs") + self.payloads_data = loadPlaybookData("dcnm_template_payloads") # load required config data - self.playbook_config = self.config_data.get('template_query_config') + self.playbook_config = self.config_data.get("template_query_config") - set_module_args(dict(state='query', - config=self.playbook_config)) + set_module_args(dict(state="query", config=self.playbook_config)) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result['diff'][0]['query']), 4) - self.assertEqual(len(result['response']), 4) - - r = result['response'] - - self.assertEqual (('Template_101' in r[0]['content']), True) - self.assertEqual (('internal policy 101' in r[0]['content']), True) - self.assertEqual (('destination-group 101' in r[0]['content']), True) - self.assertEqual (('port 57101' in r[0]['content']), True) - self.assertEqual (('dst-grp 101' in r[0]['content']), True) - self.assertEqual (('snsr-grp 101' in r[0]['content']), True) - - self.assertEqual (('Template_102' in r[1]['content']), True) - self.assertEqual (('internal policy 102' in r[1]['content']), True) - self.assertEqual (('destination-group 102' in r[1]['content']), True) - self.assertEqual (('port 57102' in r[1]['content']), True) - self.assertEqual (('dst-grp 102' in r[1]['content']), True) - self.assertEqual (('snsr-grp 102' in r[1]['content']), True) - - self.assertEqual (('Template_103' in r[2]['content']), True) - self.assertEqual (('internal policy 103' in r[2]['content']), True) - self.assertEqual (('destination-group 103' in r[2]['content']), True) - self.assertEqual (('port 57103' in r[2]['content']), True) - self.assertEqual (('dst-grp 103' in 
r[2]['content']), True) - self.assertEqual (('snsr-grp 103' in r[2]['content']), True) - - self.assertEqual (('Template_104' in r[3]['content']), True) - self.assertEqual (('internal policy 104' in r[3]['content']), True) - self.assertEqual (('destination-group 104' in r[3]['content']), True) - self.assertEqual (('port 57104' in r[3]['content']), True) - self.assertEqual (('dst-grp 104' in r[3]['content']), True) - self.assertEqual (('snsr-grp 104' in r[3]['content']), True) + self.assertEqual(len(result["diff"][0]["query"]), 4) + self.assertEqual(len(result["response"]), 4) + + r = result["response"] + + self.assertEqual(("Template_101" in r[0]["content"]), True) + self.assertEqual(("internal policy 101" in r[0]["content"]), True) + self.assertEqual(("destination-group 101" in r[0]["content"]), True) + self.assertEqual(("port 57101" in r[0]["content"]), True) + self.assertEqual(("dst-grp 101" in r[0]["content"]), True) + self.assertEqual(("snsr-grp 101" in r[0]["content"]), True) + + self.assertEqual(("Template_102" in r[1]["content"]), True) + self.assertEqual(("internal policy 102" in r[1]["content"]), True) + self.assertEqual(("destination-group 102" in r[1]["content"]), True) + self.assertEqual(("port 57102" in r[1]["content"]), True) + self.assertEqual(("dst-grp 102" in r[1]["content"]), True) + self.assertEqual(("snsr-grp 102" in r[1]["content"]), True) + + self.assertEqual(("Template_103" in r[2]["content"]), True) + self.assertEqual(("internal policy 103" in r[2]["content"]), True) + self.assertEqual(("destination-group 103" in r[2]["content"]), True) + self.assertEqual(("port 57103" in r[2]["content"]), True) + self.assertEqual(("dst-grp 103" in r[2]["content"]), True) + self.assertEqual(("snsr-grp 103" in r[2]["content"]), True) + + self.assertEqual(("Template_104" in r[3]["content"]), True) + self.assertEqual(("internal policy 104" in r[3]["content"]), True) + self.assertEqual(("destination-group 104" in r[3]["content"]), True) + self.assertEqual(("port 
57104" in r[3]["content"]), True) + self.assertEqual(("dst-grp 104" in r[3]["content"]), True) + self.assertEqual(("snsr-grp 104" in r[3]["content"]), True) def test_dcnm_template_query_existing_inuse(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_template_configs') - self.payloads_data = loadPlaybookData('dcnm_template_payloads') + self.config_data = loadPlaybookData("dcnm_template_configs") + self.payloads_data = loadPlaybookData("dcnm_template_payloads") # load required config data - self.playbook_config = self.config_data.get('template_query_inuse_config') + self.playbook_config = self.config_data.get("template_query_inuse_config") - set_module_args(dict(state='query', - config=self.playbook_config)) + set_module_args(dict(state="query", config=self.playbook_config)) result = self.execute_module(changed=False, failed=False) - self.assertEqual(len(result['diff'][0]['query']), 2) - self.assertEqual(len(result['response']), 2) - - r = result['response'] - - self.assertEqual (('Template_110' in r[0]['content']), True) - self.assertEqual (('internal policy 110' in r[0]['content']), True) - self.assertEqual (('destination-group 101' in r[0]['content']), True) - self.assertEqual (('port 57101' in r[0]['content']), True) - self.assertEqual (('dst-grp 101' in r[0]['content']), True) - self.assertEqual (('snsr-grp 101' in r[0]['content']), True) - - self.assertEqual (('Template_111' in r[1]['content']), True) - self.assertEqual (('internal policy 111' in r[1]['content']), True) - self.assertEqual (('destination-group 102' in r[1]['content']), True) - self.assertEqual (('port 57102' in r[1]['content']), True) - self.assertEqual (('dst-grp 102' in r[1]['content']), True) - self.assertEqual (('snsr-grp 102' in r[1]['content']), True) - - self.assertEqual(len(result['template-policy-map']), 2) - - self.assertEqual((result['template-policy-map']['template_110_inuse']['POLICY-35967']['fabricName'] in ['test-fabric']), True) - 
self.assertEqual((result['template-policy-map']['template_110_inuse']['POLICY-35967']['serialNumber'] in ['SAL1812NTBP', 'FOX1821H035']), True) - - self.assertEqual((result['template-policy-map']['template_111_inuse']['POLICY-46328']['fabricName'] in ['test-fabric']), True) - self.assertEqual((result['template-policy-map']['template_111_inuse']['POLICY-46328']['serialNumber'] in ['SAL1812NTBP', 'FOX1821H035']), True) - + self.assertEqual(len(result["diff"][0]["query"]), 2) + self.assertEqual(len(result["response"]), 2) + + r = result["response"] + + self.assertEqual(("Template_110" in r[0]["content"]), True) + self.assertEqual(("internal policy 110" in r[0]["content"]), True) + self.assertEqual(("destination-group 101" in r[0]["content"]), True) + self.assertEqual(("port 57101" in r[0]["content"]), True) + self.assertEqual(("dst-grp 101" in r[0]["content"]), True) + self.assertEqual(("snsr-grp 101" in r[0]["content"]), True) + + self.assertEqual(("Template_111" in r[1]["content"]), True) + self.assertEqual(("internal policy 111" in r[1]["content"]), True) + self.assertEqual(("destination-group 102" in r[1]["content"]), True) + self.assertEqual(("port 57102" in r[1]["content"]), True) + self.assertEqual(("dst-grp 102" in r[1]["content"]), True) + self.assertEqual(("snsr-grp 102" in r[1]["content"]), True) + + self.assertEqual(len(result["template-policy-map"]), 2) + + self.assertEqual( + ( + result["template-policy-map"]["template_110_inuse"]["POLICY-35967"][ + "fabricName" + ] + in ["test-fabric"] + ), + True, + ) + self.assertEqual( + ( + result["template-policy-map"]["template_110_inuse"]["POLICY-35967"][ + "serialNumber" + ] + in ["SAL1812NTBP", "FOX1821H035"] + ), + True, + ) + + self.assertEqual( + ( + result["template-policy-map"]["template_111_inuse"]["POLICY-46328"][ + "fabricName" + ] + in ["test-fabric"] + ), + True, + ) + self.assertEqual( + ( + result["template-policy-map"]["template_111_inuse"]["POLICY-46328"][ + "serialNumber" + ] + in ["SAL1812NTBP", 
"FOX1821H035"] + ), + True, + ) def test_dcnm_template_validation_fail(self): # load the json from playbooks - self.config_data = loadPlaybookData('dcnm_template_configs') - self.payloads_data = loadPlaybookData('dcnm_template_payloads') - self.validate_fail_resp = self.payloads_data.get('template_validation_fail_resp') + self.config_data = loadPlaybookData("dcnm_template_configs") + self.payloads_data = loadPlaybookData("dcnm_template_payloads") + self.validate_fail_resp = self.payloads_data.get( + "template_validation_fail_resp" + ) # load required config data - self.playbook_config = self.config_data.get('template_validation_fail_config') + self.playbook_config = self.config_data.get("template_validation_fail_config") - set_module_args(dict(state='merged', - config=self.playbook_config)) + set_module_args(dict(state="merged", config=self.playbook_config)) result = self.execute_module(changed=False, failed=False) - - self.assertEqual(len(result['diff'][0]['failed']), 1) + self.assertEqual(len(result["diff"][0]["failed"]), 1) errored = False - for d in result['response'][0]['DATA']: - if (d['reportItemType'] == 'ERROR'): + for d in result["response"][0]["DATA"]: + if d["reportItemType"] == "ERROR": errored = True self.assertEqual(errored, True) diff --git a/tests/unit/modules/dcnm/test_dcnm_vrf.py b/tests/unit/modules/dcnm/test_dcnm_vrf.py index dd6fb603a..a83ed7bc7 100644 --- a/tests/unit/modules/dcnm/test_dcnm_vrf.py +++ b/tests/unit/modules/dcnm/test_dcnm_vrf.py @@ -1,6 +1,4 @@ -#!/usr/bin/python -# -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2020-2022 Cisco and/or its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,102 +13,144 @@ # limitations under the License. 
# Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) +from __future__ import absolute_import, division, print_function + __metaclass__ = type from ansible_collections.ansible.netcommon.tests.unit.compat.mock import patch + # from units.compat.mock import patch from ansible_collections.cisco.dcnm.plugins.modules import dcnm_vrf from .dcnm_module import TestDcnmModule, set_module_args, loadPlaybookData -import json, copy +import json +import copy + class TestDcnmVrfModule(TestDcnmModule): module = dcnm_vrf - test_data = loadPlaybookData('dcnm_vrf') + test_data = loadPlaybookData("dcnm_vrf") SUCCESS_RETURN_CODE = 200 - mock_ip_sn = test_data.get('mock_ip_sn') - vrf_inv_data = test_data.get('vrf_inv_data') + mock_ip_sn = test_data.get("mock_ip_sn") + vrf_inv_data = test_data.get("vrf_inv_data") fabric_details = test_data.get("fabric_details") - playbook_config_input_validation = test_data.get('playbook_config_input_validation') - playbook_config = test_data.get('playbook_config') - playbook_config_update = test_data.get('playbook_config_update') - playbook_vrf_lite_config = test_data.get('playbook_vrf_lite_config') - playbook_vrf_lite_update_config = test_data.get('playbook_vrf_lite_update_config') - playbook_vrf_lite_update_vlan_config = test_data.get('playbook_vrf_lite_update_vlan_config') - playbook_vrf_lite_inv_config = test_data.get('playbook_vrf_lite_inv_config') - playbook_vrf_lite_replace_config = test_data.get('playbook_vrf_lite_replace_config') - playbook_config_update_vlan = test_data.get('playbook_config_update_vlan') - playbook_config_override = test_data.get('playbook_config_override') - playbook_config_incorrect_vrfid = test_data.get('playbook_config_incorrect_vrfid') - playbook_config_replace = test_data.get('playbook_config_replace') - playbook_config_replace_no_atch = test_data.get('playbook_config_replace_no_atch') - mock_vrf_attach_object_del_not_ready = 
test_data.get('mock_vrf_attach_object_del_not_ready') - mock_vrf_attach_object_del_oos = test_data.get('mock_vrf_attach_object_del_oos') - mock_vrf_attach_object_del_ready = test_data.get('mock_vrf_attach_object_del_ready') - - attach_success_resp = test_data.get('attach_success_resp') - attach_success_resp2 = test_data.get('attach_success_resp2') - attach_success_resp3 = test_data.get('attach_success_resp3') - deploy_success_resp = test_data.get('deploy_success_resp') - get_have_failure = test_data.get('get_have_failure') - error1 = test_data.get('error1') - error2 = test_data.get('error2') - error3 = test_data.get('error3') - delete_success_resp = test_data.get('delete_success_resp') - blank_data = test_data.get('blank_data') + playbook_config_input_validation = test_data.get("playbook_config_input_validation") + playbook_config = test_data.get("playbook_config") + playbook_config_update = test_data.get("playbook_config_update") + playbook_vrf_lite_config = test_data.get("playbook_vrf_lite_config") + playbook_vrf_lite_update_config = test_data.get("playbook_vrf_lite_update_config") + playbook_vrf_lite_update_vlan_config = test_data.get( + "playbook_vrf_lite_update_vlan_config" + ) + playbook_vrf_lite_inv_config = test_data.get("playbook_vrf_lite_inv_config") + playbook_vrf_lite_replace_config = test_data.get("playbook_vrf_lite_replace_config") + playbook_config_update_vlan = test_data.get("playbook_config_update_vlan") + playbook_config_override = test_data.get("playbook_config_override") + playbook_config_incorrect_vrfid = test_data.get("playbook_config_incorrect_vrfid") + playbook_config_replace = test_data.get("playbook_config_replace") + playbook_config_replace_no_atch = test_data.get("playbook_config_replace_no_atch") + mock_vrf_attach_object_del_not_ready = test_data.get( + "mock_vrf_attach_object_del_not_ready" + ) + mock_vrf_attach_object_del_oos = test_data.get("mock_vrf_attach_object_del_oos") + mock_vrf_attach_object_del_ready = 
test_data.get("mock_vrf_attach_object_del_ready") + + attach_success_resp = test_data.get("attach_success_resp") + attach_success_resp2 = test_data.get("attach_success_resp2") + attach_success_resp3 = test_data.get("attach_success_resp3") + deploy_success_resp = test_data.get("deploy_success_resp") + get_have_failure = test_data.get("get_have_failure") + error1 = test_data.get("error1") + error2 = test_data.get("error2") + error3 = test_data.get("error3") + delete_success_resp = test_data.get("delete_success_resp") + blank_data = test_data.get("blank_data") def init_data(self): # Some of the mock data is re-initialized after each test as previous test might have altered portions # of the mock data. - self.mock_vrf_object = copy.deepcopy(self.test_data.get('mock_vrf_object')) - self.mock_vrf_attach_object = copy.deepcopy(self.test_data.get('mock_vrf_attach_object')) - self.mock_vrf_attach_object_query = copy.deepcopy(self.test_data.get('mock_vrf_attach_object_query')) - self.mock_vrf_attach_object2 = copy.deepcopy(self.test_data.get('mock_vrf_attach_object2')) - self.mock_vrf_attach_object2_query = copy.deepcopy(self.test_data.get('mock_vrf_attach_object2_query')) - self.mock_vrf_attach_object_pending = copy.deepcopy(self.test_data.get('mock_vrf_attach_object_pending')) - self.mock_vrf_object_dcnm_only = copy.deepcopy(self.test_data.get('mock_vrf_object_dcnm_only')) - self.mock_vrf_attach_object_dcnm_only = copy.deepcopy(self.test_data.get('mock_vrf_attach_object_dcnm_only')) - self.mock_vrf_attach_get_ext_object_dcnm_att1_only = copy.deepcopy(self.test_data.get('mock_vrf_attach_get_ext_object_dcnm_att1_only')) + self.mock_vrf_object = copy.deepcopy(self.test_data.get("mock_vrf_object")) + self.mock_vrf_attach_object = copy.deepcopy( + self.test_data.get("mock_vrf_attach_object") + ) + self.mock_vrf_attach_object_query = copy.deepcopy( + self.test_data.get("mock_vrf_attach_object_query") + ) + self.mock_vrf_attach_object2 = copy.deepcopy( + 
self.test_data.get("mock_vrf_attach_object2") + ) + self.mock_vrf_attach_object2_query = copy.deepcopy( + self.test_data.get("mock_vrf_attach_object2_query") + ) + self.mock_vrf_attach_object_pending = copy.deepcopy( + self.test_data.get("mock_vrf_attach_object_pending") + ) + self.mock_vrf_object_dcnm_only = copy.deepcopy( + self.test_data.get("mock_vrf_object_dcnm_only") + ) + self.mock_vrf_attach_object_dcnm_only = copy.deepcopy( + self.test_data.get("mock_vrf_attach_object_dcnm_only") + ) + self.mock_vrf_attach_get_ext_object_dcnm_att1_only = copy.deepcopy( + self.test_data.get("mock_vrf_attach_get_ext_object_dcnm_att1_only") + ) self.mock_vrf_attach_get_ext_object_dcnm_att2_only = copy.deepcopy( - self.test_data.get('mock_vrf_attach_get_ext_object_dcnm_att2_only')) + self.test_data.get("mock_vrf_attach_get_ext_object_dcnm_att2_only") + ) self.mock_vrf_attach_get_ext_object_merge_att1_only = copy.deepcopy( - self.test_data.get('mock_vrf_attach_get_ext_object_merge_att1_only')) + self.test_data.get("mock_vrf_attach_get_ext_object_merge_att1_only") + ) self.mock_vrf_attach_get_ext_object_merge_att2_only = copy.deepcopy( - self.test_data.get('mock_vrf_attach_get_ext_object_merge_att2_only')) + self.test_data.get("mock_vrf_attach_get_ext_object_merge_att2_only") + ) self.mock_vrf_attach_get_ext_object_merge_att3_only = copy.deepcopy( - self.test_data.get('mock_vrf_attach_get_ext_object_merge_att3_only')) + self.test_data.get("mock_vrf_attach_get_ext_object_merge_att3_only") + ) self.mock_vrf_attach_get_ext_object_merge_att4_only = copy.deepcopy( - self.test_data.get('mock_vrf_attach_get_ext_object_merge_att4_only')) + self.test_data.get("mock_vrf_attach_get_ext_object_merge_att4_only") + ) self.mock_vrf_attach_get_ext_object_ov_att1_only = copy.deepcopy( - self.test_data.get('mock_vrf_attach_get_ext_object_ov_att1_only')) + self.test_data.get("mock_vrf_attach_get_ext_object_ov_att1_only") + ) self.mock_vrf_attach_get_ext_object_ov_att2_only = copy.deepcopy( - 
self.test_data.get('mock_vrf_attach_get_ext_object_ov_att2_only')) + self.test_data.get("mock_vrf_attach_get_ext_object_ov_att2_only") + ) self.mock_vrf_attach_lite_object = copy.deepcopy( - self.test_data.get('mock_vrf_attach_lite_object')) + self.test_data.get("mock_vrf_attach_lite_object") + ) def setUp(self): super(TestDcnmVrfModule, self).setUp() - self.mock_dcnm_ip_sn = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_vrf.get_fabric_inventory_details') + self.mock_dcnm_ip_sn = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_vrf.get_fabric_inventory_details" + ) self.run_dcnm_ip_sn = self.mock_dcnm_ip_sn.start() - self.mock_dcnm_send = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_vrf.dcnm_send') + self.mock_dcnm_send = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_vrf.dcnm_send" + ) self.run_dcnm_send = self.mock_dcnm_send.start() - self.mock_dcnm_fabric_details = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_vrf.get_fabric_details') + self.mock_dcnm_fabric_details = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_vrf.get_fabric_details" + ) self.run_dcnm_fabric_details = self.mock_dcnm_fabric_details.start() - self.mock_dcnm_version_supported = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_vrf.dcnm_version_supported') + self.mock_dcnm_version_supported = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_vrf.dcnm_version_supported" + ) self.run_dcnm_version_supported = self.mock_dcnm_version_supported.start() - self.mock_dcnm_get_url = patch('ansible_collections.cisco.dcnm.plugins.modules.dcnm_vrf.dcnm_get_url') + self.mock_dcnm_get_url = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_vrf.dcnm_get_url" + ) self.run_dcnm_get_url = self.mock_dcnm_get_url.start() def tearDown(self): @@ -121,654 +161,1090 @@ def tearDown(self): self.mock_dcnm_version_supported.stop() self.mock_dcnm_get_url.stop() - def load_fixtures(self, response=None, 
device=''): + def load_fixtures(self, response=None, device=""): self.run_dcnm_version_supported.return_value = 11 - if 'vrf_blank_fabric' in self._testMethodName: + if "vrf_blank_fabric" in self._testMethodName: self.run_dcnm_ip_sn.side_effect = [{}] else: self.run_dcnm_ip_sn.side_effect = [self.vrf_inv_data] self.run_dcnm_fabric_details.side_effect = [self.fabric_details] - if 'get_have_failure' in self._testMethodName: + if "get_have_failure" in self._testMethodName: self.run_dcnm_send.side_effect = [self.get_have_failure] - elif '_check_mode' in self._testMethodName: + elif "_check_mode" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att2_only] - - elif '_merged_new' in self._testMethodName: - self.run_dcnm_send.side_effect = [self.blank_data, self.blank_data, self.attach_success_resp, self.deploy_success_resp] - - elif '_merged_lite_new' in self._testMethodName: - self.run_dcnm_send.side_effect = [self.blank_data, self.blank_data, - self.attach_success_resp, self.deploy_success_resp] - - elif 'error1' in self._testMethodName: - self.run_dcnm_send.side_effect = [self.blank_data, self.blank_data, self.error1, - self.blank_data] - - elif 'error2' in self._testMethodName: - self.run_dcnm_send.side_effect = [self.blank_data, self.blank_data, self.error2, self.blank_data] - - elif 'error3' in self._testMethodName: - self.run_dcnm_send.side_effect = [self.blank_data, self.blank_data, self.attach_success_resp, self.error3, - self.blank_data] - - elif '_merged_duplicate' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att2_only, + ] + + elif "_merged_new" in self._testMethodName: + self.run_dcnm_send.side_effect = [ + 
self.blank_data, + self.blank_data, + self.attach_success_resp, + self.deploy_success_resp, + ] + + elif "_merged_lite_new" in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.blank_data, + self.blank_data, + self.attach_success_resp, + self.deploy_success_resp, + ] + + elif "error1" in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.blank_data, + self.blank_data, + self.error1, + self.blank_data, + ] + + elif "error2" in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.blank_data, + self.blank_data, + self.error2, + self.blank_data, + ] + + elif "error3" in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.blank_data, + self.blank_data, + self.attach_success_resp, + self.error3, + self.blank_data, + ] + + elif "_merged_duplicate" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att2_only] + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att2_only, + ] - elif '_merged_lite_duplicate' in self._testMethodName: + elif "_merged_lite_duplicate" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att4_only] + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att4_only, + ] - elif '_merged_with_incorrect' in self._testMethodName: + elif "_merged_with_incorrect" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] - 
self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att2_only] + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att2_only, + ] - elif '_merged_with_update' in self._testMethodName: + elif "_merged_with_update" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att4_only, - self.blank_data, self.attach_success_resp, - self.deploy_success_resp] - - elif '_merged_lite_update_vlan' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att4_only, + self.blank_data, + self.attach_success_resp, + self.deploy_success_resp, + ] + + elif "_merged_lite_update_vlan" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att2_only, - self.blank_data, self.attach_success_resp, - self.deploy_success_resp] - - elif '_merged_lite_update' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att2_only, + self.blank_data, + self.attach_success_resp, + self.deploy_success_resp, + ] + + elif "_merged_lite_update" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - 
self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att2_only, - self.attach_success_resp, self.deploy_success_resp] - - elif '_merged_redeploy' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att2_only, + self.attach_success_resp, + self.deploy_success_resp, + ] + + elif "_merged_redeploy" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object_pending, - self.blank_data, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att2_only, - self.deploy_success_resp] - elif '_merged_lite_redeploy' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_object_pending, + self.blank_data, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att2_only, + self.deploy_success_resp, + ] + elif "_merged_lite_redeploy" in self._testMethodName: self.init_data() - self.run_dcnm_send.side_effect = [self.mock_vrf_object, self.mock_vrf_attach_object_pending, - self.blank_data, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att4_only, - self.deploy_success_resp] - - elif 'merged_lite_invalidrole' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_object_pending, + self.blank_data, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att4_only, + self.deploy_success_resp, + ] + + elif "merged_lite_invalidrole" in self._testMethodName: self.run_dcnm_send.side_effect = [self.blank_data, self.blank_data] - elif 'replace_with_no_atch' in self._testMethodName: + elif "replace_with_no_atch" in self._testMethodName: self.init_data() 
self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att2_only, - self.attach_success_resp, self.deploy_success_resp, - self.delete_success_resp] - - elif 'replace_lite_no_atch' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att2_only, + self.attach_success_resp, + self.deploy_success_resp, + self.delete_success_resp, + ] + + elif "replace_lite_no_atch" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att4_only, - self.attach_success_resp, self.deploy_success_resp, - self.delete_success_resp] - - - elif 'replace_with_changes' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att4_only, + self.attach_success_resp, + self.deploy_success_resp, + self.delete_success_resp, + ] + + elif "replace_with_changes" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att2_only, - self.attach_success_resp, self.deploy_success_resp, - self.delete_success_resp] - - elif 'replace_lite_changes' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att2_only, + self.attach_success_resp, + 
self.deploy_success_resp, + self.delete_success_resp, + ] + + elif "replace_lite_changes" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att4_only, - self.attach_success_resp, self.deploy_success_resp, - self.delete_success_resp] - - elif 'replace_without_changes' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att4_only, + self.attach_success_resp, + self.deploy_success_resp, + self.delete_success_resp, + ] + + elif "replace_without_changes" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att2_only] + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att2_only, + ] - elif 'replace_lite_without_changes' in self._testMethodName: + elif "replace_lite_without_changes" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att4_only] - - elif 'lite_override_with_additions' in self._testMethodName: - self.run_dcnm_send.side_effect = [self.blank_data, self.blank_data, - self.attach_success_resp, self.deploy_success_resp] - - elif 'override_with_additions' in self._testMethodName: - self.run_dcnm_send.side_effect = [self.blank_data, self.blank_data, self.attach_success_resp, - 
self.deploy_success_resp] - - elif 'lite_override_with_deletions' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att4_only, + ] + + elif "lite_override_with_additions" in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.blank_data, + self.blank_data, + self.attach_success_resp, + self.deploy_success_resp, + ] + + elif "override_with_additions" in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.blank_data, + self.blank_data, + self.attach_success_resp, + self.deploy_success_resp, + ] + + elif "lite_override_with_deletions" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att4_only, - self.attach_success_resp, self.deploy_success_resp, - self.mock_vrf_attach_object_del_not_ready, - self.mock_vrf_attach_object_del_ready, self.delete_success_resp, - self.blank_data, - self.attach_success_resp2, self.deploy_success_resp] - - elif 'override_with_deletions' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att4_only, + self.attach_success_resp, + self.deploy_success_resp, + self.mock_vrf_attach_object_del_not_ready, + self.mock_vrf_attach_object_del_ready, + self.delete_success_resp, + self.blank_data, + self.attach_success_resp2, + self.deploy_success_resp, + ] + + elif "override_with_deletions" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_ov_att1_only, - 
self.mock_vrf_attach_get_ext_object_ov_att2_only, - self.attach_success_resp, self.deploy_success_resp, - self.mock_vrf_attach_object_del_not_ready, - self.mock_vrf_attach_object_del_ready, self.delete_success_resp, - self.blank_data, - self.attach_success_resp2, self.deploy_success_resp] - - elif 'override_without_changes' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_ov_att1_only, + self.mock_vrf_attach_get_ext_object_ov_att2_only, + self.attach_success_resp, + self.deploy_success_resp, + self.mock_vrf_attach_object_del_not_ready, + self.mock_vrf_attach_object_del_ready, + self.delete_success_resp, + self.blank_data, + self.attach_success_resp2, + self.deploy_success_resp, + ] + + elif "override_without_changes" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att2_only] + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att2_only, + ] - elif 'override_no_changes_lite' in self._testMethodName: + elif "override_no_changes_lite" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att3_only, - self.mock_vrf_attach_get_ext_object_merge_att4_only,] + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att3_only, + self.mock_vrf_attach_get_ext_object_merge_att4_only, + ] - elif 'delete_std' in self._testMethodName: + elif "delete_std" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] - 
self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_dcnm_att1_only, - self.mock_vrf_attach_get_ext_object_dcnm_att2_only, - self.attach_success_resp, self.deploy_success_resp, - self.mock_vrf_attach_object_del_not_ready, - self.mock_vrf_attach_object_del_ready, - self.delete_success_resp] - - elif 'delete_std_lite' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_dcnm_att1_only, + self.mock_vrf_attach_get_ext_object_dcnm_att2_only, + self.attach_success_resp, + self.deploy_success_resp, + self.mock_vrf_attach_object_del_not_ready, + self.mock_vrf_attach_object_del_ready, + self.delete_success_resp, + ] + + elif "delete_std_lite" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_dcnm_att1_only, - self.mock_vrf_attach_get_ext_object_dcnm_att4_only, - self.attach_success_resp, self.deploy_success_resp, - self.mock_vrf_attach_object_del_not_ready, - self.mock_vrf_attach_object_del_ready, - self.delete_success_resp] - - - elif 'delete_failure' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_dcnm_att1_only, + self.mock_vrf_attach_get_ext_object_dcnm_att4_only, + self.attach_success_resp, + self.deploy_success_resp, + self.mock_vrf_attach_object_del_not_ready, + self.mock_vrf_attach_object_del_ready, + self.delete_success_resp, + ] + + elif "delete_failure" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_dcnm_att1_only, - self.mock_vrf_attach_get_ext_object_dcnm_att2_only, - self.attach_success_resp, self.deploy_success_resp, - self.mock_vrf_attach_object_del_not_ready, - 
self.mock_vrf_attach_object_del_oos] - - elif 'delete_dcnm_only' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_dcnm_att1_only, + self.mock_vrf_attach_get_ext_object_dcnm_att2_only, + self.attach_success_resp, + self.deploy_success_resp, + self.mock_vrf_attach_object_del_not_ready, + self.mock_vrf_attach_object_del_oos, + ] + + elif "delete_dcnm_only" in self._testMethodName: self.init_data() obj1 = copy.deepcopy(self.mock_vrf_attach_object_del_not_ready) obj2 = copy.deepcopy(self.mock_vrf_attach_object_del_ready) - obj1['DATA'][0].update({'vrfName': 'test_vrf_dcnm'}) - obj2['DATA'][0].update({'vrfName': 'test_vrf_dcnm'}) + obj1["DATA"][0].update({"vrfName": "test_vrf_dcnm"}) + obj2["DATA"][0].update({"vrfName": "test_vrf_dcnm"}) self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object_dcnm_only] - self.run_dcnm_send.side_effect = [self.mock_vrf_object_dcnm_only, - self.mock_vrf_attach_get_ext_object_dcnm_att1_only, self.mock_vrf_attach_get_ext_object_dcnm_att2_only, - self.attach_success_resp, self.deploy_success_resp, - obj1, - obj2, - self.delete_success_resp] - - elif 'query' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object_dcnm_only, + self.mock_vrf_attach_get_ext_object_dcnm_att1_only, + self.mock_vrf_attach_get_ext_object_dcnm_att2_only, + self.attach_success_resp, + self.deploy_success_resp, + obj1, + obj2, + self.delete_success_resp, + ] + + elif "query" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att2_only, - self.mock_vrf_object, - self.mock_vrf_attach_object_query, self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att2_only] - - elif 'query_vrf_lite' in 
self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att2_only, + self.mock_vrf_object, + self.mock_vrf_attach_object_query, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att2_only, + ] + + elif "query_vrf_lite" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att4_only, - self.mock_vrf_object, - self.mock_vrf_attach_object2_query, self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att4_only] - - elif 'query_vrf_lite_without_config' in self._testMethodName: + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att4_only, + self.mock_vrf_object, + self.mock_vrf_attach_object2_query, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att4_only, + ] + + elif "query_vrf_lite_without_config" in self._testMethodName: self.init_data() self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object2] - self.run_dcnm_send.side_effect = [self.mock_vrf_object, - self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att4_only, - self.mock_vrf_object, - self.mock_vrf_attach_object2_query, self.mock_vrf_attach_get_ext_object_merge_att1_only, - self.mock_vrf_attach_get_ext_object_merge_att4_only] + self.run_dcnm_send.side_effect = [ + self.mock_vrf_object, + self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att4_only, + self.mock_vrf_object, + self.mock_vrf_attach_object2_query, + 
self.mock_vrf_attach_get_ext_object_merge_att1_only, + self.mock_vrf_attach_get_ext_object_merge_att4_only, + ] else: pass def test_dcnm_vrf_blank_fabric(self): - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result.get('msg'), 'Fabric test_fabric missing on DCNM or does not have any switches') + self.assertEqual( + result.get("msg"), + "Fabric test_fabric missing on DCNM or does not have any switches", + ) def test_dcnm_vrf_get_have_failure(self): - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result.get('msg'), 'Fabric test_fabric not present on DCNM') + self.assertEqual(result.get("msg"), "Fabric test_fabric not present on DCNM") def test_dcnm_vrf_merged_redeploy(self): - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(result.get('diff')[0]['vrf_name'], 'test_vrf_1') + self.assertEqual(result.get("diff")[0]["vrf_name"], "test_vrf_1") def test_dcnm_vrf_merged_lite_redeploy(self): - set_module_args(dict(state='merged', - fabric='test_fabric', config=self.playbook_vrf_lite_config)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + config=self.playbook_vrf_lite_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertEqual(result.get('diff')[0]['vrf_name'], 'test_vrf_1') + self.assertEqual(result.get("diff")[0]["vrf_name"], "test_vrf_1") def test_dcnm_vrf_check_mode(self): - 
set_module_args(dict(_ansible_check_mode=True, state='merged', - fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict( + _ansible_check_mode=True, + state="merged", + fabric="test_fabric", + config=self.playbook_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) - self.assertFalse(result.get('response')) + self.assertFalse(result.get("diff")) + self.assertFalse(result.get("response")) def test_dcnm_vrf_merged_new(self): - set_module_args(dict(state='merged', fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertTrue(result.get('diff')[0]['attach'][0]['deploy']) - self.assertTrue(result.get('diff')[0]['attach'][1]['deploy']) - self.assertEqual(result.get('diff')[0]['attach'][0]['ip_address'], '10.10.10.224') - self.assertEqual(result.get('diff')[0]['attach'][1]['ip_address'], '10.10.10.225') - self.assertEqual(result.get('diff')[0]['vrf_id'], 9008011) - self.assertEqual(result['response'][1]['DATA']['test-vrf-1--XYZKSJHSMK1(leaf1)'], 'SUCCESS') - self.assertEqual(result['response'][1]['DATA']['test-vrf-1--XYZKSJHSMK2(leaf2)'], 'SUCCESS') - self.assertEqual(result['response'][2]['DATA']['status'], '') - self.assertEqual(result['response'][2]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) + self.assertTrue(result.get("diff")[0]["attach"][0]["deploy"]) + self.assertTrue(result.get("diff")[0]["attach"][1]["deploy"]) + self.assertEqual( + result.get("diff")[0]["attach"][0]["ip_address"], "10.10.10.224" + ) + self.assertEqual( + result.get("diff")[0]["attach"][1]["ip_address"], "10.10.10.225" + ) + self.assertEqual(result.get("diff")[0]["vrf_id"], 9008011) + self.assertEqual( + result["response"][1]["DATA"]["test-vrf-1--XYZKSJHSMK1(leaf1)"], "SUCCESS" + ) + self.assertEqual( + 
result["response"][1]["DATA"]["test-vrf-1--XYZKSJHSMK2(leaf2)"], "SUCCESS" + ) + self.assertEqual(result["response"][2]["DATA"]["status"], "") + self.assertEqual(result["response"][2]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) def test_dcnm_vrf_merged_lite_new(self): - set_module_args(dict(state='merged', fabric='test_fabric', config=self.playbook_vrf_lite_config)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + config=self.playbook_vrf_lite_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertTrue(result.get('diff')[0]['attach'][0]['deploy']) - self.assertTrue(result.get('diff')[0]['attach'][1]['deploy']) - self.assertEqual(result.get('diff')[0]['attach'][0]['ip_address'], '10.10.10.224') - self.assertEqual(result.get('diff')[0]['attach'][1]['ip_address'], '10.10.10.227') - self.assertEqual(result.get('diff')[0]['vrf_id'], 9008011) - self.assertEqual(result['response'][1]['DATA']['test-vrf-1--XYZKSJHSMK1(leaf1)'], 'SUCCESS') - self.assertEqual(result['response'][1]['DATA']['test-vrf-1--XYZKSJHSMK2(leaf2)'], 'SUCCESS') - self.assertEqual(result['response'][2]['DATA']['status'], '') - self.assertEqual(result['response'][2]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) + self.assertTrue(result.get("diff")[0]["attach"][0]["deploy"]) + self.assertTrue(result.get("diff")[0]["attach"][1]["deploy"]) + self.assertEqual( + result.get("diff")[0]["attach"][0]["ip_address"], "10.10.10.224" + ) + self.assertEqual( + result.get("diff")[0]["attach"][1]["ip_address"], "10.10.10.227" + ) + self.assertEqual(result.get("diff")[0]["vrf_id"], 9008011) + self.assertEqual( + result["response"][1]["DATA"]["test-vrf-1--XYZKSJHSMK1(leaf1)"], "SUCCESS" + ) + self.assertEqual( + result["response"][1]["DATA"]["test-vrf-1--XYZKSJHSMK2(leaf2)"], "SUCCESS" + ) + self.assertEqual(result["response"][2]["DATA"]["status"], "") + self.assertEqual(result["response"][2]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) def test_dcnm_vrf_merged_duplicate(self): 
- set_module_args(dict(state='merged', fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) + self.assertFalse(result.get("diff")) def test_dcnm_vrf_merged_lite_duplicate(self): - set_module_args(dict(state='merged', fabric='test_fabric', config=self.playbook_vrf_lite_config)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + config=self.playbook_vrf_lite_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) + self.assertFalse(result.get("diff")) def test_dcnm_vrf_merged_with_incorrect_vrfid(self): - set_module_args(dict(state='merged', fabric='test_fabric', config=self.playbook_config_incorrect_vrfid)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + config=self.playbook_config_incorrect_vrfid, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result.get('msg'), 'vrf_id for vrf:test_vrf_1 cant be updated to a different value') + self.assertEqual( + result.get("msg"), + "vrf_id for vrf:test_vrf_1 cant be updated to a different value", + ) def test_dcnm_vrf_merged_lite_invalidrole(self): - set_module_args(dict(state='merged', fabric='test_fabric', config=self.playbook_vrf_lite_inv_config)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + config=self.playbook_vrf_lite_inv_config, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result['msg'], 'VRF LITE cannot be attached to switch 10.10.10.225 with role leaf') + self.assertEqual( + result["msg"], + "VRF LITE cannot be attached to switch 10.10.10.225 with role leaf", + ) def test_dcnm_vrf_merged_with_update(self): - set_module_args(dict(state='merged', fabric='test_fabric', config=self.playbook_config_update)) + set_module_args( + dict( + 
state="merged", fabric="test_fabric", config=self.playbook_config_update + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertTrue(result.get('diff')[0]['attach'][0]['deploy']) - self.assertEqual(result.get('diff')[0]['attach'][0]['ip_address'], '10.10.10.226') - self.assertEqual(result.get('diff')[0]['vrf_name'], 'test_vrf_1') + self.assertTrue(result.get("diff")[0]["attach"][0]["deploy"]) + self.assertEqual( + result.get("diff")[0]["attach"][0]["ip_address"], "10.10.10.226" + ) + self.assertEqual(result.get("diff")[0]["vrf_name"], "test_vrf_1") def test_dcnm_vrf_merged_lite_update(self): - set_module_args(dict(state='merged', fabric='test_fabric', config=self.playbook_vrf_lite_update_config)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + config=self.playbook_vrf_lite_update_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertTrue(result.get('diff')[0]['attach'][0]['deploy']) - self.assertEqual(result.get('diff')[0]['attach'][0]['ip_address'], '10.10.10.228') - self.assertEqual(result.get('diff')[0]['vrf_name'], 'test_vrf_1') + self.assertTrue(result.get("diff")[0]["attach"][0]["deploy"]) + self.assertEqual( + result.get("diff")[0]["attach"][0]["ip_address"], "10.10.10.228" + ) + self.assertEqual(result.get("diff")[0]["vrf_name"], "test_vrf_1") def test_dcnm_vrf_merged_with_update_vlan(self): - set_module_args(dict(state='merged', fabric='test_fabric', config=self.playbook_config_update_vlan)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + config=self.playbook_config_update_vlan, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertTrue(result.get('diff')[0]['attach'][0]['deploy']) - self.assertTrue(result.get('diff')[0]['attach'][1]['deploy']) - self.assertEqual(result.get('diff')[0]['attach'][0]['ip_address'], '10.10.10.225') - self.assertEqual(result.get('diff')[0]['attach'][1]['ip_address'], '10.10.10.226') - 
self.assertEqual(result.get('diff')[0]['attach'][0]['vlan_id'], 303) - self.assertEqual(result.get('diff')[0]['attach'][1]['vlan_id'], 303) - self.assertEqual(result.get('diff')[0]['vrf_name'], 'test_vrf_1') - self.assertEqual(result['response'][1]['DATA']['test-vrf-1--XYZKSJHSMK1(leaf1)'], 'SUCCESS') - self.assertEqual(result['response'][1]['DATA']['test-vrf-1--XYZKSJHSMK2(leaf2)'], 'SUCCESS') - self.assertEqual(result['response'][2]['DATA']['status'], '') - self.assertEqual(result['response'][2]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) + self.assertTrue(result.get("diff")[0]["attach"][0]["deploy"]) + self.assertTrue(result.get("diff")[0]["attach"][1]["deploy"]) + self.assertEqual( + result.get("diff")[0]["attach"][0]["ip_address"], "10.10.10.225" + ) + self.assertEqual( + result.get("diff")[0]["attach"][1]["ip_address"], "10.10.10.226" + ) + self.assertEqual(result.get("diff")[0]["attach"][0]["vlan_id"], 303) + self.assertEqual(result.get("diff")[0]["attach"][1]["vlan_id"], 303) + self.assertEqual(result.get("diff")[0]["vrf_name"], "test_vrf_1") + self.assertEqual( + result["response"][1]["DATA"]["test-vrf-1--XYZKSJHSMK1(leaf1)"], "SUCCESS" + ) + self.assertEqual( + result["response"][1]["DATA"]["test-vrf-1--XYZKSJHSMK2(leaf2)"], "SUCCESS" + ) + self.assertEqual(result["response"][2]["DATA"]["status"], "") + self.assertEqual(result["response"][2]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) def test_dcnm_vrf_merged_lite_update_vlan(self): - set_module_args(dict(state='merged', fabric='test_fabric', config=self.playbook_vrf_lite_update_vlan_config)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + config=self.playbook_vrf_lite_update_vlan_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertTrue(result.get('diff')[0]['attach'][0]['deploy']) - self.assertEqual(result.get('diff')[0]['attach'][0]['ip_address'], '10.10.10.228') - self.assertEqual(result.get('diff')[0]['attach'][0]['vlan_id'], 402) - 
self.assertEqual(result.get('diff')[0]['vrf_name'], 'test_vrf_1') - self.assertEqual(result['response'][1]['DATA']['test-vrf-1--XYZKSJHSMK1(leaf1)'], 'SUCCESS') - self.assertEqual(result['response'][1]['DATA']['test-vrf-1--XYZKSJHSMK2(leaf2)'], 'SUCCESS') - self.assertEqual(result['response'][2]['DATA']['status'], '') - self.assertEqual(result['response'][2]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) + self.assertTrue(result.get("diff")[0]["attach"][0]["deploy"]) + self.assertEqual( + result.get("diff")[0]["attach"][0]["ip_address"], "10.10.10.228" + ) + self.assertEqual(result.get("diff")[0]["attach"][0]["vlan_id"], 402) + self.assertEqual(result.get("diff")[0]["vrf_name"], "test_vrf_1") + self.assertEqual( + result["response"][1]["DATA"]["test-vrf-1--XYZKSJHSMK1(leaf1)"], "SUCCESS" + ) + self.assertEqual( + result["response"][1]["DATA"]["test-vrf-1--XYZKSJHSMK2(leaf2)"], "SUCCESS" + ) + self.assertEqual(result["response"][2]["DATA"]["status"], "") + self.assertEqual(result["response"][2]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) def test_dcnm_vrf_error1(self): - set_module_args(dict(state='merged', fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result['msg']['RETURN_CODE'], 400) - self.assertEqual(result['msg']['ERROR'], 'There is an error') + self.assertEqual(result["msg"]["RETURN_CODE"], 400) + self.assertEqual(result["msg"]["ERROR"], "There is an error") def test_dcnm_vrf_error2(self): - set_module_args(dict(state='merged', fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, failed=True) - self.assertIn("Entered VRF VLAN ID 203 is in use already",str(result['msg']['DATA'].values())) + self.assertIn( + "Entered VRF VLAN ID 203 is in use already", + 
str(result["msg"]["DATA"].values()), + ) def test_dcnm_vrf_error3(self): - set_module_args(dict(state='merged', fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="merged", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, failed=False) - self.assertEqual(result['response'][2]['DATA'], 'No switches PENDING for deployment') + self.assertEqual( + result["response"][2]["DATA"], "No switches PENDING for deployment" + ) def test_dcnm_vrf_replace_with_changes(self): - set_module_args(dict(state='replaced', fabric='test_fabric', config=self.playbook_config_replace)) + set_module_args( + dict( + state="replaced", + fabric="test_fabric", + config=self.playbook_config_replace, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertTrue(result.get('diff')[0]['attach'][0]['deploy']) - self.assertFalse(result.get('diff')[0]['attach'][1]['deploy']) - self.assertEqual(result.get('diff')[0]['attach'][0]['vlan_id'], 203) - self.assertEqual(result.get('diff')[0]['attach'][1]['vlan_id'], '202') - self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK1(leaf1)'], 'SUCCESS') - self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK2(leaf2)'], 'SUCCESS') - self.assertEqual(result['response'][1]['DATA']['status'], '') - self.assertEqual(result['response'][1]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) + self.assertTrue(result.get("diff")[0]["attach"][0]["deploy"]) + self.assertFalse(result.get("diff")[0]["attach"][1]["deploy"]) + self.assertEqual(result.get("diff")[0]["attach"][0]["vlan_id"], 203) + self.assertEqual(result.get("diff")[0]["attach"][1]["vlan_id"], "202") + self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK1(leaf1)"], "SUCCESS" + ) + self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK2(leaf2)"], "SUCCESS" + ) + self.assertEqual(result["response"][1]["DATA"]["status"], "") + 
self.assertEqual(result["response"][1]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) def test_dcnm_vrf_replace_lite_changes(self): - set_module_args(dict(state='replaced', fabric='test_fabric', config=self.playbook_vrf_lite_replace_config)) + set_module_args( + dict( + state="replaced", + fabric="test_fabric", + config=self.playbook_vrf_lite_replace_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertTrue(result.get('diff')[0]['attach'][0]['deploy']) - self.assertFalse(result.get('diff')[0]['attach'][1]['deploy']) - self.assertEqual(result.get('diff')[0]['attach'][0]['vlan_id'], 202) - self.assertEqual(result.get('diff')[0]['attach'][1]['vlan_id'], '202') - self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK1(leaf1)'], 'SUCCESS') - self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK2(leaf2)'], 'SUCCESS') - self.assertEqual(result['response'][1]['DATA']['status'], '') - self.assertEqual(result['response'][1]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) + self.assertTrue(result.get("diff")[0]["attach"][0]["deploy"]) + self.assertFalse(result.get("diff")[0]["attach"][1]["deploy"]) + self.assertEqual(result.get("diff")[0]["attach"][0]["vlan_id"], 202) + self.assertEqual(result.get("diff")[0]["attach"][1]["vlan_id"], "202") + self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK1(leaf1)"], "SUCCESS" + ) + self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK2(leaf2)"], "SUCCESS" + ) + self.assertEqual(result["response"][1]["DATA"]["status"], "") + self.assertEqual(result["response"][1]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) def test_dcnm_vrf_replace_with_no_atch(self): - set_module_args(dict(state='replaced', fabric='test_fabric', config=self.playbook_config_replace_no_atch)) + set_module_args( + dict( + state="replaced", + fabric="test_fabric", + config=self.playbook_config_replace_no_atch, + ) + ) result = self.execute_module(changed=True, failed=False) - 
self.assertFalse(result.get('diff')[0]['attach'][0]['deploy']) - self.assertFalse(result.get('diff')[0]['attach'][1]['deploy']) - self.assertEqual(result.get('diff')[0]['attach'][0]['vlan_id'], '202') - self.assertEqual(result.get('diff')[0]['attach'][1]['vlan_id'], '202') - self.assertEqual(result.get('diff')[0]['vrf_name'], 'test_vrf_1') - self.assertNotIn('vrf_id', result.get('diff')[0]) - self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK1(leaf1)'], 'SUCCESS') - self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK2(leaf2)'], 'SUCCESS') - self.assertEqual(result['response'][1]['DATA']['status'], '') - self.assertEqual(result['response'][1]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) + self.assertFalse(result.get("diff")[0]["attach"][0]["deploy"]) + self.assertFalse(result.get("diff")[0]["attach"][1]["deploy"]) + self.assertEqual(result.get("diff")[0]["attach"][0]["vlan_id"], "202") + self.assertEqual(result.get("diff")[0]["attach"][1]["vlan_id"], "202") + self.assertEqual(result.get("diff")[0]["vrf_name"], "test_vrf_1") + self.assertNotIn("vrf_id", result.get("diff")[0]) + self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK1(leaf1)"], "SUCCESS" + ) + self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK2(leaf2)"], "SUCCESS" + ) + self.assertEqual(result["response"][1]["DATA"]["status"], "") + self.assertEqual(result["response"][1]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) def test_dcnm_vrf_replace_lite_no_atch(self): - set_module_args(dict(state='replaced', fabric='test_fabric', config=self.playbook_config_replace_no_atch)) + set_module_args( + dict( + state="replaced", + fabric="test_fabric", + config=self.playbook_config_replace_no_atch, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertFalse(result.get('diff')[0]['attach'][0]['deploy']) - self.assertFalse(result.get('diff')[0]['attach'][1]['deploy']) - 
self.assertEqual(result.get('diff')[0]['attach'][0]['vlan_id'], '202') - self.assertEqual(result.get('diff')[0]['attach'][1]['vlan_id'], '202') - self.assertEqual(result.get('diff')[0]['vrf_name'], 'test_vrf_1') - self.assertNotIn('vrf_id', result.get('diff')[0]) - self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK1(leaf1)'], 'SUCCESS') - self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK2(leaf2)'], 'SUCCESS') - self.assertEqual(result['response'][1]['DATA']['status'], '') - self.assertEqual(result['response'][1]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) + self.assertFalse(result.get("diff")[0]["attach"][0]["deploy"]) + self.assertFalse(result.get("diff")[0]["attach"][1]["deploy"]) + self.assertEqual(result.get("diff")[0]["attach"][0]["vlan_id"], "202") + self.assertEqual(result.get("diff")[0]["attach"][1]["vlan_id"], "202") + self.assertEqual(result.get("diff")[0]["vrf_name"], "test_vrf_1") + self.assertNotIn("vrf_id", result.get("diff")[0]) + self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK1(leaf1)"], "SUCCESS" + ) + self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK2(leaf2)"], "SUCCESS" + ) + self.assertEqual(result["response"][1]["DATA"]["status"], "") + self.assertEqual(result["response"][1]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) def test_dcnm_vrf_replace_without_changes(self): - set_module_args(dict(state='replaced', fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="replaced", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) - self.assertFalse(result.get('response')) + self.assertFalse(result.get("diff")) + self.assertFalse(result.get("response")) def test_dcnm_vrf_replace_lite_without_changes(self): - set_module_args(dict(state='replaced', fabric='test_fabric', config=self.playbook_vrf_lite_config)) + set_module_args( + dict( + 
state="replaced", + fabric="test_fabric", + config=self.playbook_vrf_lite_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) - self.assertFalse(result.get('response')) + self.assertFalse(result.get("diff")) + self.assertFalse(result.get("response")) def test_dcnm_vrf_override_with_additions(self): - set_module_args(dict(state='overridden', fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="overridden", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertTrue(result.get('diff')[0]['attach'][0]['deploy']) - self.assertTrue(result.get('diff')[0]['attach'][1]['deploy']) - self.assertEqual(result.get('diff')[0]['attach'][0]['ip_address'], '10.10.10.224') - self.assertEqual(result.get('diff')[0]['attach'][1]['ip_address'], '10.10.10.225') - self.assertEqual(result.get('diff')[0]['vrf_id'], 9008011) - self.assertEqual(result['response'][1]['DATA']['test-vrf-1--XYZKSJHSMK1(leaf1)'], 'SUCCESS') - self.assertEqual(result['response'][1]['DATA']['test-vrf-1--XYZKSJHSMK2(leaf2)'], 'SUCCESS') - self.assertEqual(result['response'][2]['DATA']['status'], '') - self.assertEqual(result['response'][2]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) + self.assertTrue(result.get("diff")[0]["attach"][0]["deploy"]) + self.assertTrue(result.get("diff")[0]["attach"][1]["deploy"]) + self.assertEqual( + result.get("diff")[0]["attach"][0]["ip_address"], "10.10.10.224" + ) + self.assertEqual( + result.get("diff")[0]["attach"][1]["ip_address"], "10.10.10.225" + ) + self.assertEqual(result.get("diff")[0]["vrf_id"], 9008011) + self.assertEqual( + result["response"][1]["DATA"]["test-vrf-1--XYZKSJHSMK1(leaf1)"], "SUCCESS" + ) + self.assertEqual( + result["response"][1]["DATA"]["test-vrf-1--XYZKSJHSMK2(leaf2)"], "SUCCESS" + ) + self.assertEqual(result["response"][2]["DATA"]["status"], "") + 
self.assertEqual(result["response"][2]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) def test_dcnm_vrf_lite_override_with_additions(self): - set_module_args(dict(state='overridden', fabric='test_fabric', config=self.playbook_vrf_lite_config)) + set_module_args( + dict( + state="overridden", + fabric="test_fabric", + config=self.playbook_vrf_lite_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertTrue(result.get('diff')[0]['attach'][0]['deploy']) - self.assertTrue(result.get('diff')[0]['attach'][1]['deploy']) - self.assertEqual(result.get('diff')[0]['attach'][0]['ip_address'], '10.10.10.224') - self.assertEqual(result.get('diff')[0]['attach'][1]['ip_address'], '10.10.10.227') - self.assertEqual(result.get('diff')[0]['vrf_id'], 9008011) - self.assertEqual(result['response'][1]['DATA']['test-vrf-1--XYZKSJHSMK1(leaf1)'], 'SUCCESS') - self.assertEqual(result['response'][1]['DATA']['test-vrf-1--XYZKSJHSMK2(leaf2)'], 'SUCCESS') - self.assertEqual(result['response'][2]['DATA']['status'], '') - self.assertEqual(result['response'][2]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) + self.assertTrue(result.get("diff")[0]["attach"][0]["deploy"]) + self.assertTrue(result.get("diff")[0]["attach"][1]["deploy"]) + self.assertEqual( + result.get("diff")[0]["attach"][0]["ip_address"], "10.10.10.224" + ) + self.assertEqual( + result.get("diff")[0]["attach"][1]["ip_address"], "10.10.10.227" + ) + self.assertEqual(result.get("diff")[0]["vrf_id"], 9008011) + self.assertEqual( + result["response"][1]["DATA"]["test-vrf-1--XYZKSJHSMK1(leaf1)"], "SUCCESS" + ) + self.assertEqual( + result["response"][1]["DATA"]["test-vrf-1--XYZKSJHSMK2(leaf2)"], "SUCCESS" + ) + self.assertEqual(result["response"][2]["DATA"]["status"], "") + self.assertEqual(result["response"][2]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) def test_dcnm_vrf_override_with_deletions(self): - set_module_args(dict(state='overridden', fabric='test_fabric', config=self.playbook_config_override)) + 
set_module_args( + dict( + state="overridden", + fabric="test_fabric", + config=self.playbook_config_override, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertTrue(result.get('diff')[0]['attach'][0]['deploy']) - self.assertTrue(result.get('diff')[0]['attach'][1]['deploy']) - self.assertEqual(result.get('diff')[0]['attach'][0]['vlan_id'], 303) - self.assertEqual(result.get('diff')[0]['attach'][1]['vlan_id'], 303) - self.assertEqual(result.get('diff')[0]['vrf_id'], 9008012) - - self.assertFalse(result.get('diff')[1]['attach'][0]['deploy']) - self.assertFalse(result.get('diff')[1]['attach'][1]['deploy']) - self.assertEqual(result.get('diff')[1]['attach'][0]['vlan_id'], '202') - self.assertEqual(result.get('diff')[1]['attach'][1]['vlan_id'], '202') - self.assertEqual(result.get('diff')[1]['vrf_name'], 'test_vrf_1') - self.assertNotIn('vrf_id', result.get('diff')[1]) - - self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK1(leaf1)'], 'SUCCESS') - self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK2(leaf2)'], 'SUCCESS') - self.assertEqual(result['response'][1]['DATA']['status'], '') - self.assertEqual(result['response'][1]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) - self.assertEqual(result['response'][4]['DATA']['test-vrf-2--XYZKSJHSMK2(leaf2)'], 'SUCCESS') - self.assertEqual(result['response'][4]['DATA']['test-vrf-2--XYZKSJHSMK3(leaf3)'], 'SUCCESS') + self.assertTrue(result.get("diff")[0]["attach"][0]["deploy"]) + self.assertTrue(result.get("diff")[0]["attach"][1]["deploy"]) + self.assertEqual(result.get("diff")[0]["attach"][0]["vlan_id"], 303) + self.assertEqual(result.get("diff")[0]["attach"][1]["vlan_id"], 303) + self.assertEqual(result.get("diff")[0]["vrf_id"], 9008012) + + self.assertFalse(result.get("diff")[1]["attach"][0]["deploy"]) + self.assertFalse(result.get("diff")[1]["attach"][1]["deploy"]) + self.assertEqual(result.get("diff")[1]["attach"][0]["vlan_id"], "202") + 
self.assertEqual(result.get("diff")[1]["attach"][1]["vlan_id"], "202") + self.assertEqual(result.get("diff")[1]["vrf_name"], "test_vrf_1") + self.assertNotIn("vrf_id", result.get("diff")[1]) + + self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK1(leaf1)"], "SUCCESS" + ) + self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK2(leaf2)"], "SUCCESS" + ) + self.assertEqual(result["response"][1]["DATA"]["status"], "") + self.assertEqual(result["response"][1]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) + self.assertEqual( + result["response"][4]["DATA"]["test-vrf-2--XYZKSJHSMK2(leaf2)"], "SUCCESS" + ) + self.assertEqual( + result["response"][4]["DATA"]["test-vrf-2--XYZKSJHSMK3(leaf3)"], "SUCCESS" + ) def test_dcnm_vrf_lite_override_with_deletions(self): - set_module_args(dict(state='overridden', fabric='test_fabric', config=self.playbook_vrf_lite_replace_config)) + set_module_args( + dict( + state="overridden", + fabric="test_fabric", + config=self.playbook_vrf_lite_replace_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertTrue(result.get('diff')[0]['attach'][0]['deploy']) - self.assertFalse(result.get('diff')[0]['attach'][1]['deploy']) - self.assertEqual(result.get('diff')[0]['attach'][0]['vlan_id'], 202) - self.assertEqual(result.get('diff')[0]['attach'][1]['vlan_id'], '202') - - self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK1(leaf1)'], 'SUCCESS') - self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK2(leaf2)'], 'SUCCESS') - self.assertEqual(result['response'][1]['DATA']['status'], '') - self.assertEqual(result['response'][1]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) + self.assertTrue(result.get("diff")[0]["attach"][0]["deploy"]) + self.assertFalse(result.get("diff")[0]["attach"][1]["deploy"]) + self.assertEqual(result.get("diff")[0]["attach"][0]["vlan_id"], 202) + self.assertEqual(result.get("diff")[0]["attach"][1]["vlan_id"], "202") + + 
self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK1(leaf1)"], "SUCCESS" + ) + self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK2(leaf2)"], "SUCCESS" + ) + self.assertEqual(result["response"][1]["DATA"]["status"], "") + self.assertEqual(result["response"][1]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) def test_dcnm_vrf_override_without_changes(self): - set_module_args(dict(state='overridden', fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="overridden", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) - self.assertFalse(result.get('response')) + self.assertFalse(result.get("diff")) + self.assertFalse(result.get("response")) def test_dcnm_vrf_override_no_changes_lite(self): - set_module_args(dict(state='overridden', fabric='test_fabric', config=self.playbook_vrf_lite_config)) + set_module_args( + dict( + state="overridden", + fabric="test_fabric", + config=self.playbook_vrf_lite_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) - self.assertFalse(result.get('response')) + self.assertFalse(result.get("diff")) + self.assertFalse(result.get("response")) def test_dcnm_vrf_delete_std(self): - set_module_args(dict(state='deleted', fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="deleted", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=True, failed=False) - self.assertFalse(result.get('diff')[0]['attach'][0]['deploy']) - self.assertFalse(result.get('diff')[0]['attach'][1]['deploy']) - self.assertEqual(result.get('diff')[0]['attach'][0]['vlan_id'], '202') - self.assertEqual(result.get('diff')[0]['attach'][1]['vlan_id'], '202') - self.assertEqual(result.get('diff')[0]['vrf_name'], 'test_vrf_1') - self.assertNotIn('vrf_id', result.get('diff')[0]) - - 
self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK1(leaf1)'], 'SUCCESS') - self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK2(leaf2)'], 'SUCCESS') - self.assertEqual(result['response'][1]['DATA']['status'], '') - self.assertEqual(result['response'][1]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) + self.assertFalse(result.get("diff")[0]["attach"][0]["deploy"]) + self.assertFalse(result.get("diff")[0]["attach"][1]["deploy"]) + self.assertEqual(result.get("diff")[0]["attach"][0]["vlan_id"], "202") + self.assertEqual(result.get("diff")[0]["attach"][1]["vlan_id"], "202") + self.assertEqual(result.get("diff")[0]["vrf_name"], "test_vrf_1") + self.assertNotIn("vrf_id", result.get("diff")[0]) + + self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK1(leaf1)"], "SUCCESS" + ) + self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK2(leaf2)"], "SUCCESS" + ) + self.assertEqual(result["response"][1]["DATA"]["status"], "") + self.assertEqual(result["response"][1]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) def test_dcnm_vrf_delete_std_lite(self): - set_module_args(dict(state='deleted', fabric='test_fabric', config=self.playbook_vrf_lite_config)) + set_module_args( + dict( + state="deleted", + fabric="test_fabric", + config=self.playbook_vrf_lite_config, + ) + ) result = self.execute_module(changed=True, failed=False) - self.assertFalse(result.get('diff')[0]['attach'][0]['deploy']) - self.assertFalse(result.get('diff')[0]['attach'][1]['deploy']) - self.assertEqual(result.get('diff')[0]['attach'][0]['vlan_id'], '202') - self.assertEqual(result.get('diff')[0]['attach'][1]['vlan_id'], '202') - self.assertEqual(result.get('diff')[0]['vrf_name'], 'test_vrf_1') - self.assertNotIn('vrf_id', result.get('diff')[0]) - - self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK1(leaf1)'], 'SUCCESS') - self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK2(leaf2)'], 'SUCCESS') - 
self.assertEqual(result['response'][1]['DATA']['status'], '') - self.assertEqual(result['response'][1]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) + self.assertFalse(result.get("diff")[0]["attach"][0]["deploy"]) + self.assertFalse(result.get("diff")[0]["attach"][1]["deploy"]) + self.assertEqual(result.get("diff")[0]["attach"][0]["vlan_id"], "202") + self.assertEqual(result.get("diff")[0]["attach"][1]["vlan_id"], "202") + self.assertEqual(result.get("diff")[0]["vrf_name"], "test_vrf_1") + self.assertNotIn("vrf_id", result.get("diff")[0]) + + self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK1(leaf1)"], "SUCCESS" + ) + self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK2(leaf2)"], "SUCCESS" + ) + self.assertEqual(result["response"][1]["DATA"]["status"], "") + self.assertEqual(result["response"][1]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) def test_dcnm_vrf_delete_dcnm_only(self): - set_module_args(dict(state='deleted', fabric='test_fabric', config=[])) + set_module_args(dict(state="deleted", fabric="test_fabric", config=[])) result = self.execute_module(changed=True, failed=False) - self.assertFalse(result.get('diff')[0]['attach'][0]['deploy']) - self.assertFalse(result.get('diff')[0]['attach'][1]['deploy']) - self.assertEqual(result.get('diff')[0]['attach'][0]['vlan_id'], '402') - self.assertEqual(result.get('diff')[0]['attach'][1]['vlan_id'], '403') - self.assertEqual(result.get('diff')[0]['vrf_name'], 'test_vrf_dcnm') - self.assertNotIn('vrf_id', result.get('diff')[0]) - - self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK1(leaf1)'], 'SUCCESS') - self.assertEqual(result['response'][0]['DATA']['test-vrf-1--XYZKSJHSMK2(leaf2)'], 'SUCCESS') - self.assertEqual(result['response'][1]['DATA']['status'], '') - self.assertEqual(result['response'][1]['RETURN_CODE'], self.SUCCESS_RETURN_CODE) + self.assertFalse(result.get("diff")[0]["attach"][0]["deploy"]) + 
self.assertFalse(result.get("diff")[0]["attach"][1]["deploy"]) + self.assertEqual(result.get("diff")[0]["attach"][0]["vlan_id"], "402") + self.assertEqual(result.get("diff")[0]["attach"][1]["vlan_id"], "403") + self.assertEqual(result.get("diff")[0]["vrf_name"], "test_vrf_dcnm") + self.assertNotIn("vrf_id", result.get("diff")[0]) + + self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK1(leaf1)"], "SUCCESS" + ) + self.assertEqual( + result["response"][0]["DATA"]["test-vrf-1--XYZKSJHSMK2(leaf2)"], "SUCCESS" + ) + self.assertEqual(result["response"][1]["DATA"]["status"], "") + self.assertEqual(result["response"][1]["RETURN_CODE"], self.SUCCESS_RETURN_CODE) def test_dcnm_vrf_delete_failure(self): - set_module_args(dict(state='deleted', fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="deleted", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result['msg']['response'][2], 'Deletion of vrfs test_vrf_1 has failed') + self.assertEqual( + result["msg"]["response"][2], "Deletion of vrfs test_vrf_1 has failed" + ) def test_dcnm_vrf_query(self): - set_module_args(dict(state='query', fabric='test_fabric', config=self.playbook_config)) + set_module_args( + dict(state="query", fabric="test_fabric", config=self.playbook_config) + ) result = self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) - self.assertEqual(result.get('response')[0]['parent']['vrfName'], 'test_vrf_1') - self.assertEqual(result.get('response')[0]['parent']['vrfId'], 9008011) - self.assertEqual(result.get('response')[0]['attach'][0]['switchDetailsList'][0]['lanAttachedState'], 'DEPLOYED') - self.assertEqual(result.get('response')[0]['attach'][0]['switchDetailsList'][0]['vlan'], '202') - self.assertEqual(result.get('response')[0]['attach'][1]['switchDetailsList'][0]['lanAttachedState'], 'DEPLOYED') - 
self.assertEqual(result.get('response')[0]['attach'][1]['switchDetailsList'][0]['vlan'], '202') + self.assertFalse(result.get("diff")) + self.assertEqual(result.get("response")[0]["parent"]["vrfName"], "test_vrf_1") + self.assertEqual(result.get("response")[0]["parent"]["vrfId"], 9008011) + self.assertEqual( + result.get("response")[0]["attach"][0]["switchDetailsList"][0][ + "lanAttachedState" + ], + "DEPLOYED", + ) + self.assertEqual( + result.get("response")[0]["attach"][0]["switchDetailsList"][0]["vlan"], + "202", + ) + self.assertEqual( + result.get("response")[0]["attach"][1]["switchDetailsList"][0][ + "lanAttachedState" + ], + "DEPLOYED", + ) + self.assertEqual( + result.get("response")[0]["attach"][1]["switchDetailsList"][0]["vlan"], + "202", + ) def test_dcnm_vrf_query_vrf_lite(self): - set_module_args(dict(state='query', fabric='test_fabric', config=self.playbook_vrf_lite_config)) + set_module_args( + dict( + state="query", + fabric="test_fabric", + config=self.playbook_vrf_lite_config, + ) + ) result = self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) - self.assertEqual(result.get('response')[0]['parent']['vrfName'], 'test_vrf_1') - self.assertEqual(result.get('response')[0]['parent']['vrfId'], 9008011) - self.assertEqual(result.get('response')[0]['attach'][0]['switchDetailsList'][0]['lanAttachedState'], 'DEPLOYED') - self.assertEqual(result.get('response')[0]['attach'][0]['switchDetailsList'][0]['vlan'], '202') - self.assertEqual(result.get('response')[0]['attach'][0]['switchDetailsList'][0]['extensionValues'], '') - self.assertEqual(result.get('response')[0]['attach'][1]['switchDetailsList'][0]['lanAttachedState'], 'DEPLOYED') - self.assertEqual(result.get('response')[0]['attach'][1]['switchDetailsList'][0]['vlan'], '202') - self.assertEqual(result.get('response')[0]['attach'][1]['switchDetailsList'][0]['extensionValues'], '') + self.assertFalse(result.get("diff")) + 
self.assertEqual(result.get("response")[0]["parent"]["vrfName"], "test_vrf_1") + self.assertEqual(result.get("response")[0]["parent"]["vrfId"], 9008011) + self.assertEqual( + result.get("response")[0]["attach"][0]["switchDetailsList"][0][ + "lanAttachedState" + ], + "DEPLOYED", + ) + self.assertEqual( + result.get("response")[0]["attach"][0]["switchDetailsList"][0]["vlan"], + "202", + ) + self.assertEqual( + result.get("response")[0]["attach"][0]["switchDetailsList"][0][ + "extensionValues" + ], + "", + ) + self.assertEqual( + result.get("response")[0]["attach"][1]["switchDetailsList"][0][ + "lanAttachedState" + ], + "DEPLOYED", + ) + self.assertEqual( + result.get("response")[0]["attach"][1]["switchDetailsList"][0]["vlan"], + "202", + ) + self.assertEqual( + result.get("response")[0]["attach"][1]["switchDetailsList"][0][ + "extensionValues" + ], + "", + ) def test_dcnm_vrf_query_lite_without_config(self): - set_module_args(dict(state='query', fabric='test_fabric', config=[])) + set_module_args(dict(state="query", fabric="test_fabric", config=[])) result = self.execute_module(changed=False, failed=False) - self.assertFalse(result.get('diff')) - self.assertEqual(result.get('response')[0]['parent']['vrfName'], 'test_vrf_1') - self.assertEqual(result.get('response')[0]['parent']['vrfId'], 9008011) - self.assertEqual(result.get('response')[0]['attach'][0]['switchDetailsList'][0]['lanAttachedState'], 'DEPLOYED') - self.assertEqual(result.get('response')[0]['attach'][0]['switchDetailsList'][0]['vlan'], '202') - self.assertEqual(result.get('response')[0]['attach'][0]['switchDetailsList'][0]['extensionValues'], '') - self.assertEqual(result.get('response')[0]['attach'][1]['switchDetailsList'][0]['lanAttachedState'], 'DEPLOYED') - self.assertEqual(result.get('response')[0]['attach'][1]['switchDetailsList'][0]['vlan'], '202') - self.assertEqual(result.get('response')[0]['attach'][1]['switchDetailsList'][0]['extensionValues'], '') + self.assertFalse(result.get("diff")) + 
self.assertEqual(result.get("response")[0]["parent"]["vrfName"], "test_vrf_1") + self.assertEqual(result.get("response")[0]["parent"]["vrfId"], 9008011) + self.assertEqual( + result.get("response")[0]["attach"][0]["switchDetailsList"][0][ + "lanAttachedState" + ], + "DEPLOYED", + ) + self.assertEqual( + result.get("response")[0]["attach"][0]["switchDetailsList"][0]["vlan"], + "202", + ) + self.assertEqual( + result.get("response")[0]["attach"][0]["switchDetailsList"][0][ + "extensionValues" + ], + "", + ) + self.assertEqual( + result.get("response")[0]["attach"][1]["switchDetailsList"][0][ + "lanAttachedState" + ], + "DEPLOYED", + ) + self.assertEqual( + result.get("response")[0]["attach"][1]["switchDetailsList"][0]["vlan"], + "202", + ) + self.assertEqual( + result.get("response")[0]["attach"][1]["switchDetailsList"][0][ + "extensionValues" + ], + "", + ) def test_dcnm_vrf_validation(self): - set_module_args(dict(state='merged', fabric='test_fabric', config=self.playbook_config_input_validation)) + set_module_args( + dict( + state="merged", + fabric="test_fabric", + config=self.playbook_config_input_validation, + ) + ) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result['msg'], 'ip_address is mandatory under attach parameters') + self.assertEqual( + result["msg"], "ip_address is mandatory under attach parameters" + ) def test_dcnm_vrf_validation_no_config(self): - set_module_args(dict(state='merged', fabric='test_fabric', config=[])) + set_module_args(dict(state="merged", fabric="test_fabric", config=[])) result = self.execute_module(changed=False, failed=True) - self.assertEqual(result['msg'], 'config: element is mandatory for this state merged') + self.assertEqual( + result["msg"], "config: element is mandatory for this state merged" + ) From 58cf1c5fc50b9ae45dd7c9963d984b750f1957a7 Mon Sep 17 00:00:00 2001 From: Mike Wiebe Date: Wed, 16 Mar 2022 10:00:10 -0400 Subject: [PATCH 04/17] Remove temporary ansible-test-fixes branch --- 
.github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 3081d16a5..182766113 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -6,7 +6,7 @@ name: CI on: # Triggers the workflow on push or pull request events but only for the develop branch push: - branches: [ develop, main, ansible-test-fixes ] + branches: [ develop, main ] pull_request: branches: [ develop, main ] From 09fa5033799177433caf12b1e21588dab7521264 Mon Sep 17 00:00:00 2001 From: Mike Wiebe Date: Wed, 16 Mar 2022 10:21:52 -0400 Subject: [PATCH 05/17] Skip ansible-test import test Test is currently failing. Disable and investigate --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 182766113..8a40d7f2c 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -71,7 +71,7 @@ jobs: run: ansible-galaxy collection install .cache/collection-tarballs/*.tar.gz - name: Run sanity tests - run: ansible-test sanity --docker --python 3.8 -v --color --truncate 0 + run: ansible-test sanity --docker --python 3.8 -v --color --truncate 0 --skip-test import working-directory: /home/runner/.ansible/collections/ansible_collections/cisco/dcnm From f3b784aa90cd917284cc4d576f2a5b8a4285b04c Mon Sep 17 00:00:00 2001 From: mmudigon <62759545+mmudigon@users.noreply.github.com> Date: Tue, 29 Mar 2022 19:21:04 +0530 Subject: [PATCH 06/17] Fix for issue 139 - description is empty during update with polict ID (#146) --- plugins/modules/dcnm_policy.py | 16 ++++++++++++++-- .../tests/dcnm/dcnm_policy_modify.yaml | 9 +++++++++ .../dcnm/fixtures/dcnm_policy_configs.json | 2 +- tests/unit/modules/dcnm/test_dcnm_policy.py | 2 ++ 4 files changed, 26 insertions(+), 3 deletions(-) diff --git a/plugins/modules/dcnm_policy.py b/plugins/modules/dcnm_policy.py index 08e93b990..534b64f03 100644 --- 
a/plugins/modules/dcnm_policy.py +++ b/plugins/modules/dcnm_policy.py @@ -508,6 +508,7 @@ def dcnm_get_policy_payload_with_policy_id(self, pelem, sw): policy_payload["id"] = policy["id"] policy_payload["serialNumber"] = self.ip_sn[sw] policy_payload["policyId"] = policy["policyId"] + policy_payload["description"] = pelem["description"] policy_payload["templateName"] = policy["templateName"] policy_payload["priority"] = pelem["priority"] policy_payload["create_additional_policy"] = pelem["create_additional_policy"] @@ -725,9 +726,20 @@ def dcnm_policy_get_diff_merge(self): self.diff_create.append(policy) elif rc == "DCNM_POLICY_DONT_ADD": # A policy exists and there is no difference between the one that exists and the one that is - # is requested to be creted. Check the 'create_additional_policy' flag and crete it if it is + # is requested to be created. Check the 'create_additional_policy' flag and create it if it is # set to True if policy["create_additional_policy"] is True: + + # Check if policy is being created using policy ID. In such a case since we are trying to create + # and additional policy here, clear the "id" and "policyId" fields. 
Otherwise CREATE will fail + # complaining the policy being created is not unique since a policy with the same policy number + # already exists + + if "POLICY-" in policy.get("policyId", ''): + policy.pop("id") + policy.pop("policyId") + policy["policy_id_given"] = False + self.changed_dict[0]["merged"].append(policy) self.diff_create.append(policy) policy_id = None @@ -892,7 +904,7 @@ def dcnm_policy_create_policy(self, policy, command): else: fl = resp["DATA"]["failureList"] - if "is not unique" in fl["message"]: + if "is not unique" in fl.get("message", ""): retries = retries + 1 continue diff --git a/tests/integration/targets/dcnm_policy/tests/dcnm/dcnm_policy_modify.yaml b/tests/integration/targets/dcnm_policy/tests/dcnm/dcnm_policy_modify.yaml index 07171b4b1..a231346b0 100644 --- a/tests/integration/targets/dcnm_policy/tests/dcnm/dcnm_policy_modify.yaml +++ b/tests/integration/targets/dcnm_policy/tests/dcnm/dcnm_policy_modify.yaml @@ -138,6 +138,15 @@ index_var: my_idx register: result + # Assert for description being non-empty + - assert: + that: + - 'item["description"] != ""' + when: (my_idx < (result.results[0]["diff"][0]["merged"] | length)) + loop: '{{ result.results[0]["diff"][0]["merged"] }}' + loop_control: + index_var: my_idx + # Assert for Create responses - assert: that: diff --git a/tests/unit/modules/dcnm/fixtures/dcnm_policy_configs.json b/tests/unit/modules/dcnm/fixtures/dcnm_policy_configs.json index c5601aab6..c7e195b3d 100644 --- a/tests/unit/modules/dcnm/fixtures/dcnm_policy_configs.json +++ b/tests/unit/modules/dcnm/fixtures/dcnm_policy_configs.json @@ -222,7 +222,7 @@ "modify_policy_104_with_policy_id" : [ { "create_additional_policy": false, - "description": "modifying policy with template name", + "description": "modifying policy with policy ID", "name": "POLICY-123840", "priority": 904 }, diff --git a/tests/unit/modules/dcnm/test_dcnm_policy.py b/tests/unit/modules/dcnm/test_dcnm_policy.py index 0a3f922d6..811e36780 100644 --- 
a/tests/unit/modules/dcnm/test_dcnm_policy.py +++ b/tests/unit/modules/dcnm/test_dcnm_policy.py @@ -1154,6 +1154,8 @@ def test_dcnm_policy_modify_with_policy_id(self): ) result = self.execute_module(changed=True, failed=False) + self.assertEqual(result["diff"][0]["merged"][0]["description"], "modifying policy with policy ID") + self.assertEqual(len(result["diff"][0]["merged"]), 1) self.assertEqual(len(result["diff"][0]["deleted"]), 0) self.assertEqual(len(result["diff"][0]["query"]), 0) From 4c82cbbf810ab06e5d2ecfd197e775617420d7fd Mon Sep 17 00:00:00 2001 From: mmudigon <62759545+mmudigon@users.noreply.github.com> Date: Tue, 29 Mar 2022 19:22:28 +0530 Subject: [PATCH 07/17] Fix for issue 137 - merge aggregate objects instead of replacing them for merge state operation (#145) * Fix for issue 137 - merge aggregate objects instead of replacing them for merged state operation * Fixed YAML lint issue * Fix for another lint isssue * More lint issues fixed --- plugins/modules/dcnm_interface.py | 63 ++- .../dcnm/dcnm_intf_multi_intf_merge.yaml | 511 ++++++++++++++++++ .../dcnm_intf_multi_intf_configs.json | 145 +++++ .../dcnm_intf_multi_intf_payloads.json | 178 +++++- tests/unit/modules/dcnm/test_dcnm_intf.py | 84 ++- 5 files changed, 964 insertions(+), 17 deletions(-) create mode 100644 tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_multi_intf_merge.yaml diff --git a/plugins/modules/dcnm_interface.py b/plugins/modules/dcnm_interface.py index 9cede625f..a2f7416ba 100644 --- a/plugins/modules/dcnm_interface.py +++ b/plugins/modules/dcnm_interface.py @@ -1172,7 +1172,7 @@ def __init__(self, module): def log_msg(self, msg): if self.fd is None: - self.fd = open("interface.log", "a+") + self.fd = open("interface.log", "w+") if self.fd is not None: self.fd.write(msg) self.fd.write("\n") @@ -1237,7 +1237,10 @@ def dcnm_intf_copy_config(self): c[ck]["fabric"] = self.dcnm_intf_facts["fabric"] if cfg["type"] == "vpc": - c[ck]["sno"] = self.vpc_ip_sn[sw] + if 
self.vpc_ip_sn.get(sw, None) is None: + self.module.fail_json(msg="Switch '{0}' is not part of VPC pair, but given I/F '{1}' is of type VPC".format(sw, c['name'])) + else: + c[ck]['sno'] = self.vpc_ip_sn[sw] else: c[ck]["sno"] = self.ip_sn[sw] ifname, port_id = self.dcnm_intf_get_if_name( @@ -2107,10 +2110,7 @@ def dcnm_intf_get_have(self): if intf_payload: self.have.append(intf_payload) - def dcnm_intf_compare_elements(self, name, sno, fabric, ie1, ie2, k, state): - - # unicode encoded strings must be decoded to get proper strings which is required - # for comparison purposes + def dcnm_intf_translate_elements(self, ie1, ie2): if sys.version_info[0] >= 3: # Python version 3 onwards trfeats unicode as strings. No special treatment is required @@ -2126,6 +2126,36 @@ def dcnm_intf_compare_elements(self, name, sno, fabric, ie1, ie2, k, state): else: e2 = ie2 + return e1, e2 + + def dcnm_intf_merge_want_and_have(self, key, wvalue, hvalue): + + comb_key = "" + e1, e2 = self.dcnm_intf_translate_elements(wvalue, hvalue) + + if "CONF" in key: + if e1 == "": + comb_key = e2 + elif e2 == "": + comb_key = e1 + else: + comb_key = e2 + '\n' + e1 + else: + if e1 == "": + comb_key = e2 + elif e2 == "": + comb_key = e1 + else: + comb_key = e2 + ',' + e1 + return comb_key + + def dcnm_intf_compare_elements(self, name, sno, fabric, ie1, ie2, k, state): + + # unicode encoded strings must be decoded to get proper strings which is required + # for comparison purposes + + e1, e2 = self.dcnm_intf_translate_elements(ie1, ie2) + # The keys in key_translate represent a concatenated string. We should split # these strings and then compare the values key_translate = [ @@ -2137,6 +2167,8 @@ def dcnm_intf_compare_elements(self, name, sno, fabric, ie1, ie2, k, state): "PEER2_PO_CONF", ] + merge = False + # Some keys have values given as a list which is encoded into a # string. 
So split that up into list and then use 'set' to process # the same irrespective of the order of elements @@ -2145,8 +2177,12 @@ def dcnm_intf_compare_elements(self, name, sno, fabric, ie1, ie2, k, state): # MEMBER_INTERFACES, PEER1_MEMBER_INTERFACES, and PEER2_MEMBER_INTERFACES # have ',' joining differnet elements. So use a multi-delimiter split # to split with any delim - t_e1 = set(re.split(r"[\n,]", e1.strip())) - t_e2 = set(re.split(r"[\n,]", e2.strip())) + t_e1 = sorted(re.split(r"[\n,]", e1.strip())) + t_e2 = sorted(re.split(r"[\n,]", e2.strip())) + + # Merging of aggregate objects (refer objects in key_translate at the top) should happen only for "merged" state. + if state == "merged": + merge = True else: if isinstance(e1, str): t_e1 = e1.lower() @@ -2185,6 +2221,8 @@ def dcnm_intf_compare_elements(self, name, sno, fabric, ie1, ie2, k, state): # values for non-mandatory objects. return "copy_and_add" else: + if merge: + return "merge_and_add" return "add" return "dont_add" @@ -2302,6 +2340,9 @@ def dcnm_intf_compare_want_and_have(self, state): if res == "copy_and_add": want[k][0][ik][nk] = d[k][0][ik][nk] changed_dict[k][0][ik][nk] = d[k][0][ik][nk] + if (res == "merge_and_add"): + want[k][0][ik][nk] = self.dcnm_intf_merge_want_and_have(nk, want[k][0][ik][nk], d[k][0][ik][nk]) + changed_dict[k][0][ik][nk] = want[k][0][ik][nk] if res != "dont_add": action = "update" else: @@ -2326,6 +2367,9 @@ def dcnm_intf_compare_want_and_have(self, state): if res == "copy_and_add": want[k][0][ik] = d[k][0][ik] changed_dict[k][0][ik] = d[k][0][ik] + if (res == "merge_and_add"): + want[k][0][ik] = self.dcnm_intf_merge_want_and_have(ik, want[k][0][ik], d[k][0][ik]) + changed_dict[k][0][ik] = want[k][0][ik] if res != "dont_add": action = "update" else: @@ -2340,6 +2384,9 @@ def dcnm_intf_compare_want_and_have(self, state): if res == "copy_and_add": want[k] = d[k] changed_dict[k] = d[k] + if (res == "merge_and_add"): + want[k] = self.dcnm_intf_merge_want_and_have(k, want[k], 
d[k]) + changed_dict[k] = want[k] if res != "dont_add": action = "update" else: diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_multi_intf_merge.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_multi_intf_merge.yaml new file mode 100644 index 000000000..4ce05666f --- /dev/null +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_multi_intf_merge.yaml @@ -0,0 +1,511 @@ +############################################## +## SETUP ## +############################################## + +- name: Remove local log file + local_action: command rm -f interface.log + +- name: Put the fabric to default state + cisco.dcnm.dcnm_interface: + check_deploy: True + fabric: "{{ ansible_it_fabric }}" + state: overridden # only choose form [merged, replaced, deleted, overridden, query] + register: result + +- assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + +- block: + +############################################## +## MERGE ## +############################################## + + - name: Create eth/sub/lo interfaces + cisco.dcnm.dcnm_interface: + check_deploy: True + fabric: "{{ ansible_it_fabric }}" + state: merged # only choose form [merged, replaced, deleted, overridden, query] + config: + - name: "{{ ansible_eth_intf7 }}" # should be of the form eth + type: eth # choose from this list [pc, vpc, sub_int, lo, eth] + switch: + - "{{ ansible_switch1 }}" # provide the switch information where the config is to be deployed + deploy: true # choose from [true, false] + profile: + admin_state: true # choose from [true, false] + mode: trunk # choose from [trunk, access, routed, monitor, epl_routed] + speed: 'Auto' # choose from ['Auto', '100Mb', '1Gb', '10Gb', '25Gb', '40Gb', '100Gb' ] + bpdu_guard: true # choose from [true, false, 'no'] + port_type_fast: true # choose from [true, false] + mtu: jumbo # choose from [default, jumbo] + allowed_vlans: none # choose from [none, all, vlan range] + cmds: # 
Freeform config + - no shutdown + description: "eth interface acting as trunk" + + - name: lo100 # should be of the form lo + type: lo # choose from this list [pc, vpc, sub_int, lo, eth] + switch: + - "{{ ansible_switch1 }}" # provide the switch where to deploy the config + deploy: true # choose from [true, false] + profile: + admin_state: true # choose from [true, false] + mode: lo # choose from [lo] + int_vrf: "" # VRF name + ipv4_addr: 192.168.1.1 # ipv4 address for the loopback interface + ipv6_addr: fd08::0201 # ipV6 address for the loopback interface + route_tag: "" # Routing Tag for the interface + cmds: # Freeform config + - no shutdown + description: "loopback interface 100 configuration" + + - name: "{{ ansible_sub_intf1 }}" # should be of the form eth. + type: sub_int # choose from this list [pc, vpc, sub_int, lo, eth] + switch: + - "{{ ansible_switch1 }}" # provide the switch information where the config is to be deployed + deploy: true # choose from [true, false] + profile: + admin_state: true # choose from [true, false] + mode: subint # choose from [subint] + vlan: 100 # vlan ID [min:2, max:3967] + int_vrf: "" # VRF name + ipv4_addr: 192.168.30.1 # ipv4 address for the sub-interface + ipv4_mask_len: 24 # choose between [min:8, max:31] + ipv6_addr: fd0d::0401 # ipV6 address for the sub-interface + ipv6_mask_len: 64 # choose between [min:64, max:127] + mtu: 9216 # choose between [min:576, max:9216] + cmds: # Freeform config + - no shutdown + description: "sub interface eth1/1.1 configuration" + + register: result + + - assert: + that: + - '(result["diff"][0]["merged"] | length) == 3' + - '(result["diff"][0]["deleted"] | length) == 0' + - '(result["diff"][0]["replaced"] | length) == 0' + - '(result["diff"][0]["overridden"] | length) == 0' + - '(result["diff"][0]["deploy"] | length) == 3' + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + + - name: Modify aggregate members like cmds + cisco.dcnm.dcnm_interface: + 
check_deploy: True + fabric: "{{ ansible_it_fabric }}" + state: merged # only choose form [merged, replaced, deleted, overridden, query] + config: + - name: "{{ ansible_eth_intf7 }}" # should be of the form eth + type: eth # choose from this list [pc, vpc, sub_int, lo, eth] + switch: + - "{{ ansible_switch1 }}" # provide the switch information where the config is to be deployed + deploy: true # choose from [true, false] + profile: + admin_state: true # choose from [true, false] + mode: trunk # choose from [trunk, access, routed, monitor, epl_routed] + speed: 'Auto' # choose from ['Auto', '100Mb', '1Gb', '10Gb', '25Gb', '40Gb', '100Gb' ] + bpdu_guard: true # choose from [true, false, 'no'] + port_type_fast: true # choose from [true, false] + mtu: jumbo # choose from [default, jumbo] + allowed_vlans: none # choose from [none, all, vlan range] + cmds: # Freeform config + - spanning-tree bpduguard enable + description: "eth interface acting as trunk" + + - name: lo100 # should be of the form lo + type: lo # choose from this list [pc, vpc, sub_int, lo, eth] + switch: + - "{{ ansible_switch1 }}" # provide the switch where to deploy the config + deploy: true # choose from [true, false] + profile: + admin_state: true # choose from [true, false] + mode: lo # choose from [lo] + int_vrf: "" # VRF name + ipv4_addr: 192.168.1.1 # ipv4 address for the loopback interface + ipv6_addr: fd08::0201 # ipV6 address for the loopback interface + route_tag: "" # Routing Tag for the interface + cmds: # Freeform config + - spanning-tree bpduguard enable + description: "loopback interface 100 configuration" + + - name: "{{ ansible_sub_intf1 }}" # should be of the form eth. 
+ type: sub_int # choose from this list [pc, vpc, sub_int, lo, eth] + switch: + - "{{ ansible_switch1 }}" # provide the switch information where the config is to be deployed + deploy: true # choose from [true, false] + profile: + admin_state: true # choose from [true, false] + mode: subint # choose from [subint] + vlan: 100 # vlan ID [min:2, max:3967] + int_vrf: "" # VRF name + ipv4_addr: 192.168.30.1 # ipv4 address for the sub-interface + ipv4_mask_len: 24 # choose between [min:8, max:31] + ipv6_addr: fd0d::0401 # ipV6 address for the sub-interface + ipv6_mask_len: 64 # choose between [min:64, max:127] + mtu: 9216 # choose between [min:576, max:9216] + cmds: # Freeform config + - spanning-tree bpduguard enable + description: "sub interface eth1/1.1 configuration" + + register: result + + - assert: + that: + - '(result["diff"][0]["merged"] | length) == 3' + - '(result["diff"][0]["deleted"] | length) == 0' + - '(result["diff"][0]["replaced"] | length) == 0' + - '(result["diff"][0]["overridden"] | length) == 0' + - '(result["diff"][0]["deploy"] | length) == 3' + - '(result["diff"][0]["merged"][0]["interfaces"][0]["nvPairs"]["CONF"].split("\n") | length) == 2' + - '(result["diff"][0]["merged"][1]["interfaces"][0]["nvPairs"]["CONF"].split("\n") | length) == 2' + - '(result["diff"][0]["merged"][2]["interfaces"][0]["nvPairs"]["CONF"].split("\n") | length) == 2' + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + + +############################################## +## MERGE ## +############################################## + + - name: Create po interfaces + cisco.dcnm.dcnm_interface: + check_deploy: True + fabric: "{{ ansible_it_fabric }}" + state: merged # only choose form [merged, replaced, deleted, overridden, query] + config: + - name: po300 # should be of the form po + type: pc # choose from this list [pc, vpc, sub_int, lo, eth] + switch: + - "{{ ansible_switch1 }}" # provide the switch information where the config is to be 
deployed + deploy: true # choose from [true, false] + profile: + admin_state: true # choose from [true, false] + mode: trunk # choose from [trunk, access, l3, monitor] + members: # member interfaces + - "{{ ansible_eth_intf13 }}" + pc_mode: 'on' # choose from ['on', 'active', 'passive'] + bpdu_guard: true # choose from [true, false, no] + port_type_fast: true # choose from [true, false] + mtu: jumbo # choose from [default, jumbo] + allowed_vlans: none # choose from [none, all, vlan range] + cmds: # Freeform config + - no shutdown + description: "port channel acting as trunk" + + - name: po310 # should be of the form po + type: pc # choose from this list [pc, vpc, sub_int, lo, eth] + switch: + - "{{ ansible_switch1 }}" # provide the switch information where the config is to be deployed + deploy: true # choose from [true, false] + profile: + admin_state: true # choose from [true, false] + mode: trunk # choose from [trunk, access, l3, monitor] + pc_mode: 'on' # choose from ['on', 'active', 'passive'] + bpdu_guard: true # choose from [true, false, no] + port_type_fast: true # choose from [true, false] + mtu: jumbo # choose from [default, jumbo] + allowed_vlans: none # choose from [none, all, vlan range] + description: "port channel acting as trunk" + register: result + + - assert: + that: + - '(result["diff"][0]["merged"] | length) == 2' + - '(result["diff"][0]["deleted"] | length) == 0' + - '(result["diff"][0]["replaced"] | length) == 0' + - '(result["diff"][0]["overridden"] | length) == 0' + - '(result["diff"][0]["deploy"] | length) == 2' + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + + - name: Modify po300 - no aggregate members like members and cmds + cisco.dcnm.dcnm_interface: + check_deploy: True + fabric: "{{ ansible_it_fabric }}" + state: merged # only choose form [merged, replaced, deleted, overridden, query] + config: + - name: po300 # should be of the form po + type: pc # choose from this list [pc, vpc, sub_int, lo, 
eth] + switch: + - "{{ ansible_switch1 }}" # provide the switch information where the config is to be deployed + deploy: true # choose from [true, false] + profile: + admin_state: true # choose from [true, false] + mode: trunk # choose from [trunk, access, l3, monitor] + pc_mode: 'on' # choose from ['on', 'active', 'passive'] + bpdu_guard: true # choose from [true, false, no] + port_type_fast: true # choose from [true, false] + mtu: jumbo # choose from [default, jumbo] + allowed_vlans: none # choose from [none, all, vlan range] + description: "port channel acting as trunk" + register: result + + - assert: + that: + - '(result["diff"][0]["merged"] | length) == 1' + - '(result["diff"][0]["deleted"] | length) == 0' + - '(result["diff"][0]["replaced"] | length) == 0' + - '(result["diff"][0]["overridden"] | length) == 0' + - '(result["diff"][0]["deploy"] | length) == 1' + - '(result["diff"][0]["merged"][0]["interfaces"][0]["nvPairs"]["CONF"].split("\n") | length) == 1' + - '(result["diff"][0]["merged"][0]["interfaces"][0]["nvPairs"]["MEMBER_INTERFACES"].split(",") | length) == 1' + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + + - name: Modify po300 - new aggregate members like members and cmds + cisco.dcnm.dcnm_interface: + check_deploy: True + fabric: "{{ ansible_it_fabric }}" + state: merged # only choose form [merged, replaced, deleted, overridden, query] + config: + - name: po300 # should be of the form po + type: pc # choose from this list [pc, vpc, sub_int, lo, eth] + switch: + - "{{ ansible_switch1 }}" # provide the switch information where the config is to be deployed + deploy: true # choose from [true, false] + profile: + admin_state: true # choose from [true, false] + mode: trunk # choose from [trunk, access, l3, monitor] + members: # member interfaces + - "{{ ansible_eth_intf14 }}" + pc_mode: 'on' # choose from ['on', 'active', 'passive'] + bpdu_guard: true # choose from [true, false, no] + port_type_fast: true # 
choose from [true, false] + mtu: jumbo # choose from [default, jumbo] + allowed_vlans: none # choose from [none, all, vlan range] + cmds: # Freeform config + - spanning-tree bpduguard enable + description: "port channel acting as trunk" + register: result + + - assert: + that: + - '(result["diff"][0]["merged"] | length) == 1' + - '(result["diff"][0]["deleted"] | length) == 0' + - '(result["diff"][0]["replaced"] | length) == 0' + - '(result["diff"][0]["overridden"] | length) == 0' + - '(result["diff"][0]["deploy"] | length) == 1' + - '(result["diff"][0]["merged"][0]["interfaces"][0]["nvPairs"]["CONF"].split("\n") | length) == 2' + - '(result["diff"][0]["merged"][0]["interfaces"][0]["nvPairs"]["MEMBER_INTERFACES"].split(",") | length) == 2' + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + + - name: Modify po310 - no aggregate members like members and cmds + cisco.dcnm.dcnm_interface: + check_deploy: True + fabric: "{{ ansible_it_fabric }}" + state: merged # only choose form [merged, replaced, deleted, overridden, query] + config: + - name: po310 # should be of the form po + type: pc # choose from this list [pc, vpc, sub_int, lo, eth] + switch: + - "{{ ansible_switch1 }}" # provide the switch information where the config is to be deployed + deploy: true # choose from [true, false] + profile: + admin_state: true # choose from [true, false] + mode: trunk # choose from [trunk, access, l3, monitor] + pc_mode: 'on' # choose from ['on', 'active', 'passive'] + bpdu_guard: true # choose from [true, false, no] + port_type_fast: true # choose from [true, false] + mtu: jumbo # choose from [default, jumbo] + allowed_vlans: none # choose from [none, all, vlan range] + description: "port channel acting as trunk - updated" + register: result + + - assert: + that: + - '(result["diff"][0]["merged"] | length) == 1' + - '(result["diff"][0]["deleted"] | length) == 0' + - '(result["diff"][0]["replaced"] | length) == 0' + - 
'(result["diff"][0]["overridden"] | length) == 0' + - '(result["diff"][0]["deploy"] | length) == 1' + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + + - name: Modify po310 - new aggregate members like members and cmds + cisco.dcnm.dcnm_interface: + check_deploy: True + fabric: "{{ ansible_it_fabric }}" + state: merged # only choose form [merged, replaced, deleted, overridden, query] + config: + - name: po310 # should be of the form po + type: pc # choose from this list [pc, vpc, sub_int, lo, eth] + switch: + - "{{ ansible_switch1 }}" # provide the switch information where the config is to be deployed + deploy: true # choose from [true, false] + profile: + admin_state: true # choose from [true, false] + mode: trunk # choose from [trunk, access, l3, monitor] + members: # member interfaces + - "{{ ansible_eth_intf18 }}" + pc_mode: 'on' # choose from ['on', 'active', 'passive'] + bpdu_guard: true # choose from [true, false, no] + port_type_fast: true # choose from [true, false] + mtu: jumbo # choose from [default, jumbo] + cmds: # Freeform config + - spanning-tree bpduguard enable + allowed_vlans: none # choose from [none, all, vlan range] + description: "port channel acting as trunk" + register: result + + - assert: + that: + - '(result["diff"][0]["merged"] | length) == 1' + - '(result["diff"][0]["deleted"] | length) == 0' + - '(result["diff"][0]["replaced"] | length) == 0' + - '(result["diff"][0]["overridden"] | length) == 0' + - '(result["diff"][0]["deploy"] | length) == 1' + - '(result["diff"][0]["merged"][0]["interfaces"][0]["nvPairs"]["CONF"].split("\n") | length) == 1' + - '(result["diff"][0]["merged"][0]["interfaces"][0]["nvPairs"]["MEMBER_INTERFACES"].split(",") | length) == 1' + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + +############################################## +## MERGE ## +############################################## + + - name: Create vpc interfaces + 
cisco.dcnm.dcnm_interface: + check_deploy: True + fabric: "{{ ansible_it_fabric }}" + state: merged # only choose form [merged, replaced, deleted, overridden, query] + config: + - name: vpc750 # should be of the form vpc + type: vpc # choose from this list [pc, vpc, sub_int, lo, eth] + switch: # provide switches of vPC pair + - "{{ ansible_switch1 }}" + deploy: true # choose from [true, false] + profile: + admin_state: true # choose from [true, false] + mode: trunk # choose from [trunk, access] + peer1_pcid: 120 # choose between [Min:1, Max:4096], if not given, will be VPC port-id + peer2_pcid: 120 # choose between [Min:1, Max:4096], if not given, will be VPC port-id + peer1_members: # member interfaces on peer 1 + - "{{ ansible_eth_intf15 }}" + peer2_members: # member interfaces on peer 2 + - "{{ ansible_eth_intf15 }}" + pc_mode: 'active' # choose from ['on', 'active', 'passive'] + bpdu_guard: true # choose from [true, false, 'no'] + port_type_fast: true # choose from [true, false] + mtu: jumbo # choose from [default, jumbo] + peer1_allowed_vlans: none # choose from [none, all, vlan range] + peer2_allowed_vlans: none # choose from [none, all, vlan range] + peer1_description: "VPC acting as trunk peer1" + peer2_description: "VPC acting as trunk peer2" + peer1_cmds: # Freeform config + - no shutdown + peer2_cmds: # Freeform config + - no shutdown + register: result + + - assert: + that: + - '(result["diff"][0]["merged"] | length) == 1' + - '(result["diff"][0]["deleted"] | length) == 0' + - '(result["diff"][0]["replaced"] | length) == 0' + - '(result["diff"][0]["overridden"] | length) == 0' + - '(result["diff"][0]["deploy"] | length) == 1' + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + + - name: Modify aggregate members like peer1_cmds, peer2_cmds, peer1_members and peer2_members + cisco.dcnm.dcnm_interface: + check_deploy: True + fabric: "{{ ansible_it_fabric }}" + state: merged # only choose form [merged, replaced, deleted, 
overridden, query] + config: + - name: vpc750 # should be of the form vpc + type: vpc # choose from this list [pc, vpc, sub_int, lo, eth] + switch: # provide switches of vPC pair + - "{{ ansible_switch1 }}" + deploy: true # choose from [true, false] + profile: + admin_state: true # choose from [true, false] + mode: trunk # choose from [trunk, access] + peer1_pcid: 120 # choose between [Min:1, Max:4096], if not given, will be VPC port-id + peer2_pcid: 120 # choose between [Min:1, Max:4096], if not given, will be VPC port-id + peer1_members: # member interfaces on peer 1 + - "{{ ansible_eth_intf16 }}" + peer2_members: # member interfaces on peer 2 + - "{{ ansible_eth_intf16 }}" + pc_mode: 'active' # choose from ['on', 'active', 'passive'] + bpdu_guard: true # choose from [true, false, 'no'] + port_type_fast: true # choose from [true, false] + mtu: jumbo # choose from [default, jumbo] + peer1_allowed_vlans: none # choose from [none, all, vlan range] + peer2_allowed_vlans: none # choose from [none, all, vlan range] + peer1_description: "VPC acting as trunk peer1" + peer2_description: "VPC acting as trunk peer2" + peer1_cmds: # Freeform config + - spanning-tree bpduguard enable + peer2_cmds: # Freeform config + - spanning-tree bpduguard enable + + + register: result + + - assert: + that: + - '(result["diff"][0]["merged"] | length) == 1' + - '(result["diff"][0]["deleted"] | length) == 0' + - '(result["diff"][0]["replaced"] | length) == 0' + - '(result["diff"][0]["overridden"] | length) == 0' + - '(result["diff"][0]["deploy"] | length) == 1' + - '(result["diff"][0]["merged"][0]["interfaces"][0]["nvPairs"]["PEER1_PO_CONF"].split("\n") | length) == 2' + - '(result["diff"][0]["merged"][0]["interfaces"][0]["nvPairs"]["PEER2_PO_CONF"].split("\n") | length) == 2' + - '(result["diff"][0]["merged"][0]["interfaces"][0]["nvPairs"]["PEER1_MEMBER_INTERFACES"].split(",") | length) == 2' + - 
'(result["diff"][0]["merged"][0]["interfaces"][0]["nvPairs"]["PEER2_MEMBER_INTERFACES"].split(",") | length) == 2' + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + + +############################################## +## CLEANUP ## +############################################## + + always: + + - name: Put fabric to default state + cisco.dcnm.dcnm_interface: + check_deploy: True + fabric: "{{ ansible_it_fabric }}" + state: overridden # only choose form [merged, replaced, deleted, overridden, query] + register: result + when: IT_CONTEXT is not defined + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + when: IT_CONTEXT is not defined diff --git a/tests/unit/modules/dcnm/fixtures/dcnm_intf_multi_intf_configs.json b/tests/unit/modules/dcnm/fixtures/dcnm_intf_multi_intf_configs.json index 4d5577dd9..8555beefb 100644 --- a/tests/unit/modules/dcnm/fixtures/dcnm_intf_multi_intf_configs.json +++ b/tests/unit/modules/dcnm/fixtures/dcnm_intf_multi_intf_configs.json @@ -191,6 +191,151 @@ "deploy": "True" }], + "multi_intf_merged_config_exist" : [ + { + "switch": [ + "192.168.1.108" + ], + "profile": { + "description": "port channel acting as trunk", + "bpdu_guard": "True", + "sno": "SAL1819SAN8", + "mtu": "jumbo", + "pc_mode": "on", + "mode": "trunk", + "members": [ + "e1/29" + ], + "port_type_fast": "True", + "policy": "int_port_channel_trunk_host_11_1", + "admin_state": "True", + "allowed_vlans": "none", + "cmds": [ + "spanning-tree bpduguard enable" + ], + "ifname": "Port-channel300", + "fabric": "test_fabric" + }, + "type": "pc", + "name": "po300", + "deploy": "True" + }, + { + "profile": { + "peer2_pcid": 1, + "fabric": "test_fabric", + "bpdu_guard": "True", + "pc_mode": "on", + "peer1_members": [ + "e1/24" + ], + "peer2_members": [ + "e1/24" + ], + "peer2_cmds": [ + "spanning-tree bpduguard enable" + ], + "peer1_pcid": 1, + "mtu": "jumbo", + "peer1_cmds": [ + "spanning-tree bpduguard enable" + ], 
+ "peer1_allowed_vlans": "none", + "mode": "trunk", + "policy": "int_vpc_trunk_host_11_1", + "port_type_fast": "True", + "peer2_description": "VPC acting as trunk peer2", + "admin_state": "True", + "ifname": "vPC301", + "peer1_description": "VPC acting as trunk peer1", + "sno": "FOX1821H035~SAL1819SAN8", + "peer2_allowed_vlans": "none" + }, + "switch": [ + "192.168.1.109", + "192.168.1.108" + ], + "type": "vpc", + "name": "vpc301", + "deploy": "True" + }, + { + "type": "eth", + "switch": [ + "192.168.1.108" + ], + "profile": { + "description": "eth interface acting as trunk", + "bpdu_guard": "True", + "sno": "SAL1819SAN8", + "mtu": "jumbo", + "admin_state": "True", + "mode": "trunk", + "port_type_fast": "True", + "policy": "int_trunk_host_11_1", + "allowed_vlans": "none", + "cmds": [ + "spanning-tree bpduguard enable" + ], + "speed": "auto", + "ifname": "Ethernet1/10", + "fabric": "test_fabric" + }, + "name": "eth1/10", + "deploy": "True" + }, + { + "switch": [ + "192.168.1.108" + ], + "deploy": "True", + "type": "sub_int", + "name": "eth1/1.1", + "profile": { + "ipv6_addr": "", + "int_vrf": "", + "ipv4_mask_len": 24, + "ipv6_mask_len": 64, + "fabric": "test_fabric", + "sno": "SAL1819SAN8", + "vlan": 100, + "mtu": 9216, + "ipv4_addr": "1.1.1.1", + "mode": "subint", + "policy": "int_subif_11_1", + "admin_state": "True", + "ifname": "Ethernet1/1.1", + "cmds": [ + "spanning-tree bpduguard enable" + ], + "description": "sub interface eth25/1.1 configuration" + } + }, + { + "switch": [ + "192.168.1.108" + ], + "profile": { + "ipv6_addr": "", + "int_vrf": "", + "description": "loopback interface 100 configuration", + "sno": "SAL1819SAN8", + "cmds": [ + "spanning-tree bpduguard enable" + ], + "ipv4_addr": "100.10.10.1", + "mode": "lo", + "policy": "int_loopback_11_1", + "admin_state": "True", + "ifname": "Loopback303", + "route_tag": "", + "fabric": "test_fabric" + }, + "type": "lo", + "name": "lo303", + "deploy": "True" + }], + "missing_intf_elems_config" : [ { "switch": 
[ diff --git a/tests/unit/modules/dcnm/fixtures/dcnm_intf_multi_intf_payloads.json b/tests/unit/modules/dcnm/fixtures/dcnm_intf_multi_intf_payloads.json index b89ea7583..b8385e4fe 100644 --- a/tests/unit/modules/dcnm/fixtures/dcnm_intf_multi_intf_payloads.json +++ b/tests/unit/modules/dcnm/fixtures/dcnm_intf_multi_intf_payloads.json @@ -34,5 +34,181 @@ ], "RETURN_CODE": 200, "METHOD": "GET" - } + }, + + "pc_payload": + { + "MESSAGE": "OK", + "REQUEST_PATH": "https://10.122.197.6:443/rest/interface?serialNumber=SAL1819SAN8", + "RETURN_CODE": 200, + "METHOD": "GET", + "DATA": [ + { + "policy": "int_port_channel_trunk_host_11_1", + "interfaceType": "INTERFACE_PORT_CHANNEL", + "interfaces": [ + { + "serialNumber": "SAL1819SAN8", + "interfaceType": "INTERFACE_PORT_CHANNEL", + "ifName": "Port-channel300", + "fabricName": "test_fabric", + "nvPairs": { + "SPEED": "Auto", + "MEMBER_INTERFACES": "e1/9", + "PC_MODE": "on", + "BPDUGUARD_ENABLED": "true", + "PORTTYPE_FAST_ENABLED": "true", + "MTU": "jumbo", + "ALLOWED_VLANS": "none", + "PO_ID": "Port-channel300", + "DESC": "port channel acting as trunk", + "CONF": "no shutdown", + "ADMIN_STATE": "true" + } + } + ], + "skipResourceCheck": "false" + }] + }, + "vpc_payload": + { + "MESSAGE": "OK", + "REQUEST_PATH": "https://10.122.197.6:443/rest/interface?serialNumber=SAL1819SAN8", + "RETURN_CODE": 200, + "METHOD": "GET", + "DATA": [ + { + "policy": "int_vpc_trunk_host_11_1", + "interfaceType": "INTERFACE_VPC", + "interfaces": [ + { + "serialNumber": "FOX1821H035~SAL1819SAN8", + "interfaceType": "INTERFACE_VPC", + "ifName": "vPC301", + "fabricName": "test_fabric", + "nvPairs": { + "SPEED": "Auto", + "PEER1_MEMBER_INTERFACES": "e1/14", + "PEER2_MEMBER_INTERFACES": "e1/14", + "PC_MODE": "on", + "BPDUGUARD_ENABLED": "true", + "PORTTYPE_FAST_ENABLED": "true", + "MTU": "jumbo", + "PEER1_ALLOWED_VLANS": "none", + "PEER2_ALLOWED_VLANS": "none", + "PEER1_PCID": "1", + "PEER2_PCID": "1", + "PEER1_PO_DESC": "VPC acting as trunk peer1", + 
"PEER2_PO_DESC": "VPC acting as trunk peer2", + "PEER1_PO_CONF": "no shutdown", + "PEER2_PO_CONF": "no shutdown", + "ADMIN_STATE": "true", + "INTF_NAME": "vPC301" + } + } + ], + "skipResourceCheck": "false" + }] + }, + + "eth_payload": + { + "MESSAGE": "OK", + "REQUEST_PATH": "https://10.122.197.6:443/rest/interface?serialNumber=SAL1819SAN8", + "RETURN_CODE": 200, + "METHOD": "GET", + "DATA": [ + { + "policy": "int_trunk_host_11_1", + "interfaceType": "INTERFACE_ETHERNET", + "interfaces": [ + { + "serialNumber": "SAL1819SAN8", + "interfaceType": "INTERFACE_ETHERNET", + "ifName": "Ethernet1/10", + "fabricName": "test_fabric", + "nvPairs": { + "SPEED": "auto", + "BPDUGUARD_ENABLED": "true", + "PORTTYPE_FAST_ENABLED": "true", + "MTU": "jumbo", + "ALLOWED_VLANS": "none", + "INTF_NAME": "Ethernet1/10", + "DESC": "eth interface acting as trunk", + "CONF": "no shutdown", + "ADMIN_STATE": "true" + } + } + ] + }] + }, + + "subint_payload": + { + "MESSAGE": "OK", + "REQUEST_PATH": "https://10.122.197.6:443/rest/interface?serialNumber=SAL1819SAN8", + "RETURN_CODE": 200, + "METHOD": "GET", + "DATA": [ + { + "policy": "int_subif_11_1", + "interfaceType": "SUBINTERFACE", + "interfaces": [ + { + "serialNumber": "SAL1819SAN8", + "interfaceType": "SUBINTERFACE", + "ifName": "Ethernet1/1.1", + "fabricName": "test_fabric", + "nvPairs": { + "SPEED": "Auto", + "VLAN": "100", + "INTF_VRF": "", + "IP": "1.1.1.1", + "PREFIX": "24", + "IPv6": "", + "IPv6_PREFIX": "", + "MTU": "9216", + "INTF_NAME": "Ethernet1/1.1", + "DESC": "sub interface eth25/1.1 configuration", + "CONF": "no shutdown", + "ADMIN_STATE": "true" + } + } + ], + "skipResourceCheck": "false" + }] + }, + + "lo_payload": + { + "MESSAGE": "OK", + "REQUEST_PATH": "https://10.122.197.6:443/rest/interface?serialNumber=SAL1819SAN8", + "RETURN_CODE": 200, + "METHOD": "GET", + "DATA": [ + { + "policy": "int_loopback_11_1", + "interfaceType": "INTERFACE_LOOPBACK", + "interfaces": [ + { + "serialNumber": "SAL1819SAN8", + 
"interfaceType": "INTERFACE_LOOPBACK", + "ifName": "Loopback303", + "fabricName": "test_fabric", + "nvPairs": { + "SPEED": "Auto", + "INTF_VRF": "", + "IP": "100.10.10.1", + "V6IP": "", + "ROUTE_MAP_TAG": "", + "INTF_NAME": "Loopback303", + "DESC": "loopback interface 100 configuration", + "CONF": "no shutdown", + "ADMIN_STATE": "true" + } + } + ], + "skipResourceCheck": "false" + }] + } } diff --git a/tests/unit/modules/dcnm/test_dcnm_intf.py b/tests/unit/modules/dcnm/test_dcnm_intf.py index 2b4508dce..40df8299a 100644 --- a/tests/unit/modules/dcnm/test_dcnm_intf.py +++ b/tests/unit/modules/dcnm/test_dcnm_intf.py @@ -31,22 +31,18 @@ class TestDcnmIntfModule(TestDcnmModule): module = dcnm_interface - fd = None def init_data(self): - pass + self.fd = None def log_msg(self, msg): - if fd is None: - fd = open("intf-ut.log", "w") + if self.fd is None: + self.fd = open("intf-ut.log", "w") self.fd.write(msg) self.fd.flush() - def log_msg(self, msg): - self.fd.write(msg) - def setUp(self): super(TestDcnmIntfModule, self).setUp() @@ -122,6 +118,42 @@ def load_multi_intf_fixtures(self): playbook_deployed_data, ] + if ("_multi_intf_merged_exist" in self._testMethodName): + # No I/F exists case + playbook_pc_intf = self.payloads_data.get("pc_payload") + playbook_lo_intf = self.payloads_data.get("lo_payload") + playbook_eth_intf = self.payloads_data.get("eth_payload") + playbook_subint_intf = self.payloads_data.get("subint_payload") + playbook_vpc_intf = self.payloads_data.get("vpc_payload") + playbook_have_all_data = self.have_all_payloads_data.get("payloads") + playbook_deployed_data = self.have_all_payloads_data.get("deployed_payloads") + + self.run_dcnm_send.side_effect = [ + self.playbook_mock_vpc_resp, + self.playbook_mock_vpc_resp, + playbook_pc_intf, + playbook_vpc_intf, + playbook_subint_intf, + playbook_lo_intf, + playbook_eth_intf, + playbook_have_all_data, + playbook_have_all_data, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + 
self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + self.playbook_mock_succ_resp, + playbook_deployed_data] + def load_missing_intf_elems_fixtures(self): if "_missing_intf_elems" in self._testMethodName: @@ -1364,6 +1396,39 @@ def test_dcnm_intf_multi_intf_merged_new(self): True, ) + def test_dcnm_intf_multi_intf_merged_exist(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_intf_multi_intf_configs") + self.have_all_payloads_data = loadPlaybookData("dcnm_intf_have_all_payloads") + self.payloads_data = loadPlaybookData("dcnm_intf_multi_intf_payloads") + + # load required config data + self.playbook_config = self.config_data.get("multi_intf_merged_config_exist") + self.playbook_mock_succ_resp = self.config_data.get("mock_succ_resp") + self.playbook_mock_vpc_resp = self.config_data.get("mock_vpc_resp") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args(dict(state="merged", + fabric="test_fabric", + config=self.playbook_config)) + result = self.execute_module(changed=True, failed=False) + + self.assertEqual(len(result["diff"][0]["merged"]), 5) + for d in result["diff"][0]["merged"]: + for intf in d["interfaces"]: + self.assertEqual((intf["ifName"] in ["Port-channel300", + "vPC301", + "Ethernet1/1.1", + "Ethernet1/10", + "Loopback303"]), True) + for key in intf["nvPairs"]: + if "MEMBER_INTERFACES" in key: + self.assertEqual(len(intf["nvPairs"][key].split(",")), 2) + if "CONF" in key: + self.assertEqual(len(intf["nvPairs"][key].split("\n")), 2) + def test_dcnm_intf_missing_intf_elems_merged_new(self): # load the json from playbooks @@ -1593,6 +1658,7 @@ 
def test_dcnm_intf_pc_replaced_existing(self): "PREFIX", "ROUTING_TAG", "SPEED", + "CONF" ] for d in result["diff"][0]["replaced"]: @@ -2436,7 +2502,9 @@ def test_dcnm_intf_vpc_replaced_existing(self): "PEER2_ACCESS_VLAN", "PEER1_CONF", "PEER2_CONF", - "INTF_NAME", + "PEER1_PO_CONF", + "PEER2_PO_CONF", + "INTF_NAME" ] for d in result["diff"][0]["replaced"]: From 6e70b3a7dcf5b53a76a0e0ab6ff20f42b8552a11 Mon Sep 17 00:00:00 2001 From: mmudigon <62759545+mmudigon@users.noreply.github.com> Date: Tue, 10 May 2022 19:06:40 +0530 Subject: [PATCH 08/17] Resource Manager module initial checkin (#147) * Resource Manager module initial checkin * Addressed review comments, added new test cases as suggested * Set ansible.netcommon version to 2.6.1 * Renamed dcnm_res_manager as dcnm_resource_manager as per review comments and also added correct import for mock in unit test files * Changed galaxy.yaml to refer to latest version of netcommon * Modified the IT directory to dcnm_resource_manager to make the ansible-playbook command work Co-authored-by: Mike Wiebe --- README.md | 1 + docs/cisco.dcnm.dcnm_interface_module.rst | 36 +- docs/cisco.dcnm.dcnm_network_module.rst | 3 +- ...isco.dcnm.dcnm_resource_manager_module.rst | 407 ++++++ docs/cisco.dcnm.dcnm_rest_module.rst | 4 +- .../cisco.dcnm.dcnm_service_policy_module.rst | 16 +- ...dcnm.dcnm_service_route_peering_module.rst | 1 - docs/cisco.dcnm.dcnm_template_module.rst | 20 + plugins/modules/dcnm_resource_manager.py | 1274 ++++++++++++++++ .../dcnm_resource_manager/defaults/main.yaml | 2 + .../dcnm_resource_manager/meta/main.yaml | 1 + .../dcnm_resource_manager/tasks/dcnm.yaml | 20 + .../dcnm_resource_manager/tasks/main.yaml | 2 + .../tests/dcnm/dcnm_res_manager_delete.yaml | 270 ++++ .../dcnm/dcnm_res_manager_invalid_params.yaml | 130 ++ .../tests/dcnm/dcnm_res_manager_merge.yaml | 373 +++++ .../tests/dcnm/dcnm_res_manager_query.yaml | 331 +++++ tests/sanity/ignore-2.10.txt | 3 +- tests/sanity/ignore-2.11.txt | 3 +- 
tests/sanity/ignore-2.12.txt | 3 +- tests/sanity/ignore-2.9.txt | 3 +- .../fixtures/dcnm_res_manager_configs.json | 431 ++++++ .../fixtures/dcnm_res_manager_payloads.json | 1276 +++++++++++++++++ tests/unit/modules/dcnm/test_dcnm_intf.py | 2 +- .../unit/modules/dcnm/test_dcnm_inventory.py | 2 +- tests/unit/modules/dcnm/test_dcnm_policy.py | 2 +- .../modules/dcnm/test_dcnm_res_manager.py | 1113 ++++++++++++++ .../modules/dcnm/test_dcnm_service_node.py | 2 +- .../modules/dcnm/test_dcnm_service_policy.py | 2 +- .../dcnm/test_dcnm_service_route_peering.py | 2 +- tests/unit/modules/dcnm/test_dcnm_template.py | 2 +- tests/unit/modules/dcnm/test_dcnm_vrf.py | 2 +- 32 files changed, 5697 insertions(+), 42 deletions(-) create mode 100644 docs/cisco.dcnm.dcnm_resource_manager_module.rst create mode 100644 plugins/modules/dcnm_resource_manager.py create mode 100644 tests/integration/targets/dcnm_resource_manager/defaults/main.yaml create mode 100644 tests/integration/targets/dcnm_resource_manager/meta/main.yaml create mode 100644 tests/integration/targets/dcnm_resource_manager/tasks/dcnm.yaml create mode 100644 tests/integration/targets/dcnm_resource_manager/tasks/main.yaml create mode 100644 tests/integration/targets/dcnm_resource_manager/tests/dcnm/dcnm_res_manager_delete.yaml create mode 100644 tests/integration/targets/dcnm_resource_manager/tests/dcnm/dcnm_res_manager_invalid_params.yaml create mode 100644 tests/integration/targets/dcnm_resource_manager/tests/dcnm/dcnm_res_manager_merge.yaml create mode 100644 tests/integration/targets/dcnm_resource_manager/tests/dcnm/dcnm_res_manager_query.yaml create mode 100644 tests/unit/modules/dcnm/fixtures/dcnm_res_manager_configs.json create mode 100644 tests/unit/modules/dcnm/fixtures/dcnm_res_manager_payloads.json create mode 100644 tests/unit/modules/dcnm/test_dcnm_res_manager.py diff --git a/README.md b/README.md index 3aa0becd2..bc62bce60 100644 --- a/README.md +++ b/README.md @@ -36,6 +36,7 @@ Name | Description 
[cisco.dcnm.dcnm_inventory](https://github.com/CiscoDevNet/ansible-dcnm/blob/main/docs/cisco.dcnm.dcnm_inventory_module.rst)|Add and remove Switches from a DCNM managed VXLAN fabric. [cisco.dcnm.dcnm_network](https://github.com/CiscoDevNet/ansible-dcnm/blob/main/docs/cisco.dcnm.dcnm_network_module.rst)|Add and remove Networks from a DCNM managed VXLAN fabric. [cisco.dcnm.dcnm_policy](https://github.com/CiscoDevNet/ansible-dcnm/blob/main/docs/cisco.dcnm.dcnm_policy_module.rst)|DCNM Ansible Module for managing policies. +[cisco.dcnm.dcnm_resource_manager](https://github.com/CiscoDevNet/ansible-dcnm/blob/main/docs/cisco.dcnm.dcnm_resource_manager_module.rst)|DCNM ansible module for managing resources. [cisco.dcnm.dcnm_rest](https://github.com/CiscoDevNet/ansible-dcnm/blob/main/docs/cisco.dcnm.dcnm_rest_module.rst)|Send REST API requests to DCNM controller. [cisco.dcnm.dcnm_service_node](https://github.com/CiscoDevNet/ansible-dcnm/blob/main/docs/cisco.dcnm.dcnm_service_node_module.rst)|Create/Modify/Delete service node based on type and attached interfaces from a DCNM managed VXLAN fabric. [cisco.dcnm.dcnm_service_policy](https://github.com/CiscoDevNet/ansible-dcnm/blob/main/docs/cisco.dcnm.dcnm_service_policy_module.rst)|DCNM ansible module for managing service policies. diff --git a/docs/cisco.dcnm.dcnm_interface_module.rst b/docs/cisco.dcnm.dcnm_interface_module.rst index 551dea4ec..6cf3e14e4 100644 --- a/docs/cisco.dcnm.dcnm_interface_module.rst +++ b/docs/cisco.dcnm.dcnm_interface_module.rst @@ -293,8 +293,8 @@ Parameters
    Choices: -
  • Min:1
  • -
  • Max:31
  • +
  • Min 1
  • +
  • Max 31
Default:
8
@@ -333,8 +333,8 @@ Parameters
    Choices: -
  • Min:1
  • -
  • Max:31
  • +
  • Min 1
  • +
  • Max 31
Default:
8
@@ -763,8 +763,8 @@ Parameters
    Choices: -
  • Min:1
  • -
  • Max:31
  • +
  • Min 1
  • +
  • Max 31
Default:
8
@@ -957,8 +957,8 @@ Parameters
    Choices: -
  • Min:8
  • -
  • Max:31
  • +
  • Min 8
  • +
  • Max 31
Default:
8
@@ -997,8 +997,8 @@ Parameters
    Choices: -
  • Min:1
  • -
  • Max:31
  • +
  • Min 1
  • +
  • Max 31
Default:
8
@@ -1040,8 +1040,8 @@ Parameters
    Choices: -
  • {'Min': 576}
  • -
  • {'Max': 9216}
  • +
  • Min 576
  • +
  • Max 9216
Default:
9216
@@ -1062,8 +1062,8 @@ Parameters
    Choices: -
  • {'Min': 2}
  • -
  • {'Max': 3967}
  • +
  • Min 2
  • +
  • Max 3967
Default:
0
@@ -1306,8 +1306,8 @@ Parameters
    Choices: -
  • Min:1
  • -
  • Max:4096
  • +
  • Min 1
  • +
  • Max 4096
Default:
"Default value is the vPC port identifier"
@@ -1423,8 +1423,8 @@ Parameters
    Choices: -
  • Min:1
  • -
  • Max:4096
  • +
  • Min 1
  • +
  • Max 4096
Default:
"Default value is the vPC port identifier"
diff --git a/docs/cisco.dcnm.dcnm_network_module.rst b/docs/cisco.dcnm.dcnm_network_module.rst index 388ed2f81..dc1cfa495 100644 --- a/docs/cisco.dcnm.dcnm_network_module.rst +++ b/docs/cisco.dcnm.dcnm_network_module.rst @@ -180,6 +180,7 @@ Parameters
Loopback ID for DHCP Relay interface
+
Configured ID value should be in range 0-1023
@@ -532,7 +533,7 @@ Parameters Examples -------- -.. code-block:: yaml+jinja +.. code-block:: yaml # This module supports the following states: # diff --git a/docs/cisco.dcnm.dcnm_resource_manager_module.rst b/docs/cisco.dcnm.dcnm_resource_manager_module.rst new file mode 100644 index 000000000..8cf87423d --- /dev/null +++ b/docs/cisco.dcnm.dcnm_resource_manager_module.rst @@ -0,0 +1,407 @@ +.. _cisco.dcnm.dcnm_resource_manager_module: + + +******************************** +cisco.dcnm.dcnm_resource_manager +******************************** + +**DCNM ansible module for managing resources.** + + +Version added: 2.1.0 + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- DCNM ansible module for creating, deleting and querying resources + + + + +Parameters +---------- + +.. raw:: html + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ParameterChoices/DefaultsComments
+
+ config + +
+ list + / elements=dictionary +
+
+ +
A list of dictionaries containing resources and switch information
+
+
+ entity_name + +
+ string + / required +
+
+ +
A unique name which identifies the entity to which the resource is allocated.
+
The format of this parameter depends on the scope_type. The details are provided in
+
the EXAMPLES section
+
+
+ pool_name + +
+ string + / required +
+
+ +
Name of the resource pool from which the resource is allocated
+
+
+ pool_type + +
+ string + / required +
+
+
    Choices: +
  • ID
  • +
  • IP
  • +
  • SUBNET
  • +
+
+
Type of resource pool
+
+
+ resource + +
+ string + / required +
+
+ +
Value of the resource being allocated
+
The value will be
+
an integer if pool_type is ID
+
an IPV4/IPV6 address if pool_type is IP
+
an IPV4 address/net_mask or IPV6 address/net_mask if pool_type is SUBNET
+
+
+ scope_type + +
+ string + / required +
+
+
    Choices: +
  • fabric
  • +
  • device
  • +
  • device_interface
  • +
  • device_pair
  • +
  • link
  • +
+
+
Scope of resource allocation
+
+
+ switch + +
+ list +
+
+ +
IP address or DNS name of the management interface of the switch to which the allocated resource is assigned.
+
+
+ fabric + +
+ string + / required +
+
+ +
Name of the target fabric for resource manager operations
+
+
+ state + +
+ string +
+
+
    Choices: +
  • merged ←
  • +
  • deleted
  • +
  • query
  • +
+
+
The required state of the configuration after module completion.
+
+
+ + + + +Examples +-------- + +.. code-block:: yaml + + # Entity name format + # ================== + # + # The format of the entity name depends on the scope_type of the resource being allocated. + + # Scope Type Entity Name + # ===================================== + # Fabric Eg: My_Network_30000 + # Device Eg: loopback0 + # Device Pair Eg: FDO21331S8T~FDO21332E6X~vPC1 + # Device Interface Eg: FDO21332E6X~Ethernet1/13 + # Link Eg: FDO21332E6X~Ethernet1/3~FDO21331S8T~Ethernet1/3 + + # where FDO21331S8T and FDO21331S8T are switch serial numbers + + # This module supports the following states: + + # Merged: + # Resources defined in the playbook will be merged into the target fabric. + # - If the Resources does not exist it will be added. + # - If the Resources exists but properties managed by the playbook are different + # they will be updated if possible. + # - Resources that are not specified in the playbook will be untouched. + # + # Deleted: + # Resources defined in the playbook will be deleted. + # + # Query: + # Returns the current DCNM state for the Resources listed in the playbook. 
+ + # CREATING RESOURCES + # ================== + - name: Create Resources + cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: test_fabric + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "101" # The value of the resource being created + + - entity_name: "9M99N34RDED~9NXHSNTEO6C" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + scope_type: "device_pair" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - 192.175.1.1 + - 192.175.1.2 + resource: "500" # The value of the resource being created + + - entity_name: "mmudigon-2" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "LOOPBACK0_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "110.1.1.1" # The value of the resource being created + + - entity_name: "9M99N34RDED~Ethernet1/10" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "LOOPBACK1_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "device_interface" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - 192.175.1.1 + resource: "fe:80::04" # The value of the resource being created + + - entity_name: 
"9M99N34RDED~Ethernet1/3~9NXHSNTEO6C~Ethernet1/3" # A unique name to identify the resource + pool_type: "SUBNET" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "SUBNET" # Based on the 'poolType', select appropriate name + scope_type: "link" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - 192.175.1.1 + resource: "fe:80:05::05/64" + + # DELETING RESOURCES + # ================== + + - name: Delete Resources + cisco.dcnm.dcnm_resource_manager: + state: deleted # choose form [merged, deleted, query] + fabric: test_fabric + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + + - entity_name: "9M99N34RDED~9NXHSNTEO6C" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + scope_type: "device_pair" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is attached + - 192.175.1.1 + - 192.175.1.2 + + - entity_name: "mmudigon-2" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "LOOPBACK0_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + + - entity_name: "9M99N34RDED~Ethernet1/10" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "LOOPBACK1_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "device_interface" # choose from ['fabric', 'device', 
device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is attached + - 192.175.1.1 + + - entity_name: "9M99N34RDED~Ethernet1/3~9NXHSNTEO6C~Ethernet1/3" # A unique name to identify the resource + pool_type: "SUBNET" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "SUBNET" # Based on the 'poolType', select appropriate name + scope_type: "link" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is attached + - 192.175.1.1 + + # QUERY SERVICE POLICIES + # ====================== + + - name: Query all Resources - no filters + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: test_fabric + + - name: Query Resources - filter by entity name + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: test_fabric + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + - entity_name: "loopback_dev" # A unique name to identify the resource + - entity_name: "9M99N34RDED~9NXHSNTEO6C" # A unique name to identify the resource + - entity_name: "9M99N34RDED~Ethernet1/10" # A unique name to identify the resource + - entity_name: "9M99N34RDED~Ethernet1/2~~9NXHSNTEO6CEthernet1/2" # A unique name to identify the resource + + - name: Query Resources - filter by switch + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: test_fabric + config: + - switch: # provide the switch information to which the given resource is attached + - 192.175.1.1 + + - name: Query Resources - filter by fabric and pool name + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: test_fabric + config: + - pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + - pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + - 
pool_name: "SUBNET" # Based on the 'poolType', select appropriate name + + - name: Query Resources - filter by switch and pool name + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - 192.175.1.1 + - pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - 192.175.1.1 + - pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - 192.175.1.2 + + - name: Query Resources - mixed query + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: test_fabric + config: + - entity_name: "l2_vni_fabric" # A unique name to identify the resource + - switch: # provide the switch information to which the given resource is attached + - 192.175.1.1 + - pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + - pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - 192.175.1.1 + + + + +Status +------ + + +Authors +~~~~~~~ + +- Mallik Mudigonda (@mmudigon) diff --git a/docs/cisco.dcnm.dcnm_rest_module.rst b/docs/cisco.dcnm.dcnm_rest_module.rst index f07894b94..cf392f941 100644 --- a/docs/cisco.dcnm.dcnm_rest_module.rst +++ b/docs/cisco.dcnm.dcnm_rest_module.rst @@ -110,7 +110,7 @@ Examples path: /rest/control/fabrics - name: Set deployment to false in lanAttachList for vrf - dcnm_rest: + dcnm_rest: method: POST path: /rest/top-down/fabrics/fabric1/vrfs/attachments json_data: 
'[{"vrfName":"sales66_vrf1","lanAttachList":[{"fabric":"fabric1","vrfName":"sales66_vrf1","serialNumber":"FDO21392QKM","vlan":2000,"freeformConfig":"","deployment":false,"extensionValues":"","instanceValues":"{"loopbackId":"","loopbackIpAddress":"","loopbackIpV6Address":""}"}]}]' @@ -120,7 +120,7 @@ Examples data: "{{ lookup('file', 'validate_payload') }}" - name: Validate a template - cisco.dcnm.dcnm_rest: + cisco.dcnm.dcnm_rest: method: POST path: /fm/fmrest/config/templates/validate json_data: "{{ data }}" diff --git a/docs/cisco.dcnm.dcnm_service_policy_module.rst b/docs/cisco.dcnm.dcnm_service_policy_module.rst index a317481f3..4ed7a7536 100644 --- a/docs/cisco.dcnm.dcnm_service_policy_module.rst +++ b/docs/cisco.dcnm.dcnm_service_policy_module.rst @@ -207,8 +207,8 @@ Parameters
    Choices:
  • any
  • -
  • Min:1
  • -
  • Max:65535
  • +
  • Min 1
  • +
  • Max 65535
@@ -292,8 +292,8 @@ Parameters
    Choices: -
  • Min:1
  • -
  • Max:65535)
  • +
  • Min 1
  • +
  • Max 65535)
Default:
"will be auto-generated by DCNM"
@@ -314,8 +314,8 @@ Parameters
    Choices: -
  • Min:1
  • -
  • Max:65535)
  • +
  • Min 1
  • +
  • Max 65535)
Default:
"will be auto-generated by DCNM"
@@ -338,8 +338,8 @@ Parameters
    Choices:
  • any
  • -
  • Min:1
  • -
  • Max:65535
  • +
  • Min 1
  • +
  • Max 65535
diff --git a/docs/cisco.dcnm.dcnm_service_route_peering_module.rst b/docs/cisco.dcnm.dcnm_service_route_peering_module.rst index 17086869c..c4c6cc691 100644 --- a/docs/cisco.dcnm.dcnm_service_route_peering_module.rst +++ b/docs/cisco.dcnm.dcnm_service_route_peering_module.rst @@ -2348,7 +2348,6 @@ Examples service_fabric: external config: node_name: IT-SN-1 # mandatory - node_name: IT-SN-2 # mandatory diff --git a/docs/cisco.dcnm.dcnm_template_module.rst b/docs/cisco.dcnm.dcnm_template_module.rst index 98c800136..764bcd606 100644 --- a/docs/cisco.dcnm.dcnm_template_module.rst +++ b/docs/cisco.dcnm.dcnm_template_module.rst @@ -117,6 +117,26 @@ Parameters
User defined labels for identifying the templates
+ + + +
+ type + +
+ string +
+ + +
    Choices: +
  • cli ←
  • +
  • python
  • +
+ + +
Type of the template content, either CLI or Python
+ + diff --git a/plugins/modules/dcnm_resource_manager.py b/plugins/modules/dcnm_resource_manager.py new file mode 100644 index 000000000..f7014c4f4 --- /dev/null +++ b/plugins/modules/dcnm_resource_manager.py @@ -0,0 +1,1274 @@ +#!/usr/bin/python +# +# Copyright (c) 2022 Cisco and/or its affiliates. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +__author__ = "Mallik Mudigonda" + +DOCUMENTATION = """ +--- +module: dcnm_resource_manager +short_description: DCNM ansible module for managing resources. +version_added: "2.1.0" +description: + - DCNM ansible module for creating, deleting and querying resources +author: Mallik Mudigonda (@mmudigon) +options: + fabric: + description: + - 'Name of the target fabric for resource manager operations' + type: str + required: true + state: + description: + - The required state of the configuration after module completion. + type: str + required: false + choices: + - merged + - deleted + - query + default: merged + config: + description: + - A list of dictionaries containing resources and switch information + type: list + elements: dict + suboptions: + entity_name: + description: + - A unique name which identifies the entity to which the resourcce is allocated to. + - The format of this parameter depends on the scope_type. 
The details are provided in + - the EXAMPLES section + type: str + required: true + pool_type: + description: + - Type of resource pool + type: str + required: true + choices: + - ID + - IP + - SUBNET + pool_name: + description: + - Name of the resource pool from which the resource is allocated + type: str + required: true + scope_type: + description: + - Socpe of resource allocation + type: str + required: true + choices: + - fabric + - device + - device_interface + - device_pair + - link + resource: + description: + - Value of the resource being allocated + - The value will be + - an integer if pool_type is ID + - an IPV4/IPV6 address if pool_type is IP + - an IPV4 address/net_mask or IPV6 address/net_maskif pool_type is SUBNET + type: str + required: true + switch: + description: + - IP address or DNS name of the management interface of the switch to which the allocated resource is assigned to. + type: list + required: false +""" + +EXAMPLES = """ +# Entity name format +# ================== +# +# The format of the entity name depends on the scope_type of the resource being allocated. + +# Scope Type Entity Name +# ===================================== +# Fabric Eg: My_Network_30000 +# Device Eg: loopback0 +# Device Pair Eg: FDO21331S8T~FDO21332E6X~vPC1 +# Device Interface Eg: FDO21332E6X~Ethernet1/13 +# Link Eg: FDO21332E6X~Ethernet1/3~FDO21331S8T~Ethernet1/3 + +# where FDO21331S8T and FDO21331S8T are switch serial numbers + +# This module supports the following states: + +# Merged: +# Resources defined in the playbook will be merged into the target fabric. +# - If the Resources does not exist it will be added. +# - If the Resources exists but properties managed by the playbook are different +# they will be updated if possible. +# - Resources that are not specified in the playbook will be untouched. +# +# Deleted: +# Resources defined in the playbook will be deleted. +# +# Query: +# Returns the current DCNM state for the Resources listed in the playbook. 
+ +# CREATING RESOURCES +# ================== +- name: Create Resources + cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: test_fabric + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "101" # The value of the resource being created + + - entity_name: "9M99N34RDED~9NXHSNTEO6C" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + scope_type: "device_pair" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - 192.175.1.1 + - 192.175.1.2 + resource: "500" # The value of the resource being created + + - entity_name: "mmudigon-2" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "LOOPBACK0_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "110.1.1.1" # The value of the resource being created + + - entity_name: "9M99N34RDED~Ethernet1/10" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "LOOPBACK1_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "device_interface" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - 192.175.1.1 + resource: "fe:80::04" # The value of the resource being created + + - entity_name: 
"9M99N34RDED~Ethernet1/3~9NXHSNTEO6C~Ethernet1/3" # A unique name to identify the resource + pool_type: "SUBNET" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "SUBNET" # Based on the 'poolType', select appropriate name + scope_type: "link" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - 192.175.1.1 + resource: "fe:80:05::05/64" + +# DELETING RESOURCES +# ================== + +- name: Delete Resources + cisco.dcnm.dcnm_resource_manager: + state: deleted # choose form [merged, deleted, query] + fabric: test_fabric + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + + - entity_name: "9M99N34RDED~9NXHSNTEO6C" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + scope_type: "device_pair" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is attached + - 192.175.1.1 + - 192.175.1.2 + + - entity_name: "mmudigon-2" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "LOOPBACK0_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + + - entity_name: "9M99N34RDED~Ethernet1/10" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "LOOPBACK1_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "device_interface" # choose from ['fabric', 'device', 
device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is attached + - 192.175.1.1 + + - entity_name: "9M99N34RDED~Ethernet1/3~9NXHSNTEO6C~Ethernet1/3" # A unique name to identify the resource + pool_type: "SUBNET" # choose from ['ID', 'IP, 'SUBNET'] + pool_name: "SUBNET" # Based on the 'poolType', select appropriate name + scope_type: "link" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is attached + - 192.175.1.1 + +# QUERY SERVICE POLICIES +# ====================== + +- name: Query all Resources - no filters + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: test_fabric + +- name: Query Resources - filter by entity name + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: test_fabric + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + - entity_name: "loopback_dev" # A unique name to identify the resource + - entity_name: "9M99N34RDED~9NXHSNTEO6C" # A unique name to identify the resource + - entity_name: "9M99N34RDED~Ethernet1/10" # A unique name to identify the resource + - entity_name: "9M99N34RDED~Ethernet1/2~~9NXHSNTEO6CEthernet1/2" # A unique name to identify the resource + +- name: Query Resources - filter by switch + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: test_fabric + config: + - switch: # provide the switch information to which the given resource is attached + - 192.175.1.1 + +- name: Query Resources - filter by fabric and pool name + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: test_fabric + config: + - pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + - pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + - 
pool_name: "SUBNET" # Based on the 'poolType', select appropriate name + +- name: Query Resources - filter by switch and pool name + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - 192.175.1.1 + - pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - 192.175.1.1 + - pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - 192.175.1.2 + +- name: Query Resources - mixed query + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: test_fabric + config: + - entity_name: "l2_vni_fabric" # A unique name to identify the resource + - switch: # provide the switch information to which the given resource is attached + - 192.175.1.1 + - pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + - pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - 192.175.1.1 + +""" + +import time +import json +import copy +import ipaddress + +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.cisco.dcnm.plugins.module_utils.network.dcnm.dcnm import ( + dcnm_send, + validate_list_of_dicts, + dcnm_version_supported, + get_ip_sn_dict, + get_fabric_inventory_details, + dcnm_get_ip_addr_info, +) + +from datetime import datetime + + +# Resource Class object which includes all the required methods and data to configure and maintain resources +class DcnmResManager: + dcnm_rm_paths = { + 11: { + "RM_GET_RESOURCES_BY_FABRIC": 
"/rest/resource-manager/fabrics/{}", + "RM_GET_RESOURCES_BY_SNO_AND_POOLNAME": "/rest/resource-manager/switch/{}/pools/{}", + "RM_GET_RESOURCES_BY_FABRIC_AND_POOLNAME": "/rest/resource-manager/fabric/{}/pools/{}", + "RM_CREATE_RESOURCE": "/rest/resource-manager/fabrics/{}/resources", + "RM_DELETE_RESOURCE": "/rest/resource-manager/resources?id=", + }, + 12: { + "RM_GET_RESOURCES_BY_FABRIC": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/{}", + "RM_GET_RESOURCES_BY_SNO_AND_POOLNAME": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/switch/{}/pools/{}", + "RM_GET_RESOURCES_BY_FABRIC_AND_POOLNAME": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabric/{}/pools/{}", + "RM_CREATE_RESOURCE": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/{}/resources", + "RM_DELETE_RESOURCE": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/resources?id=", + }, + } + + def __init__(self, module): + self.module = module + self.params = module.params + self.fabric = module.params["fabric"] + self.config = copy.deepcopy(module.params.get("config")) + self.rm_info = [] + self.want = [] + self.have = [] + self.diff_create = [] + self.diff_delete = [] + self.fd = None + self.res_pools = {} + self.changed_dict = [ + {"merged": [], "deleted": [], "query": [], "debugs": []} + ] + + self.dcnm_version = dcnm_version_supported(self.module) + + self.inventory_data = get_fabric_inventory_details( + self.module, self.fabric + ) + self.ip_sn, self.hn_sn = get_ip_sn_dict(self.inventory_data) + + self.paths = self.dcnm_rm_paths[self.dcnm_version] + self.result = dict(changed=False, diff=[], response=[]) + + def log_msg(self, msg): + + if self.fd is None: + self.fd = open("res_mgr.log", "w+") + if self.fd is not None: + self.fd.write(msg) + self.fd.write("\n") + self.fd.flush() + + def dcnm_rm_validate_and_build_rm_info(self, cfg, rm_spec): + + """ + Routine to validate the playbook input and fill up default values 
for objects not included. + In this case we validate the playbook against rm_spec which inlcudes required information + This routine updates self.rm_info with validated playbook information by defaulting values + not included + + Parameters: + cfg (dict): The config from playbook + rm_spec (dict): Resource Manager spec + + Returns: + None + """ + + rm_info, invalid_params = validate_list_of_dicts(cfg, rm_spec) + if invalid_params: + mesg = "Invalid parameters in playbook: {0}".format( + "while processing Resource - " + + cfg[0]["entity_name"] + + ", " + + "\n".join(invalid_params) + ) + self.module.fail_json(msg=mesg) + + self.rm_info.extend(rm_info) + + def dcnm_rm_check_resource_params(self, res): + + """ + Routine to validate the poolname and scope type combinations. Since all such combinations are + not valid, this routine checks for valid combinations + + Parameters: + res (dict): Resource information + + Returns: + True - if the resource has a valid poolname-scopetype combination + False - Otherwise + """ + + poolname_to_scope_type = { + "L3_VNI": ["fabric"], + "L2_VNI": ["fabric"], + "VPC_ID": ["device_pair"], + "FEX_ID": ["device"], + "BGP_ASN_ID": ["fabric"], + "LOOPBACK_ID": ["device"], + "PORT_CHANNEL_ID": ["device"], + "VPC_DOMAIN_ID": ["fabric"], + "VPC_PEER_LINK_VLAN": ["device_pair"], + "TOP_DOWN_L3_DOT1Q": ["device_interface"], + "TUNNEL_ID_IOS_XE": ["device"], + "OBJECT_TRACKING_NUMBER_POOL": ["device"], + "INSTANCE_ID": ["device"], + "PORT_CHANNEL_ID_IOS_XE": ["device"], + "ROUTE_MAP_SEQUENCE_NUMBER_POOL": ["device"], + "SERVICE_NETWORK_VLAN": ["device"], + "TOP_DOWN_VRF_VLAN": ["device"], + "TOP_DOWN_NETWORK_VLAN": ["device"], + "IP_POOL": ["fabric", "device_interface"], + "SUBNET": ["link"], + } + # RESOURCE = {'entity_name': 'l3_vni_fabric', 'pool_type': 'ID', 'pool_name': 'L3_VNI', 'scope_type': 'fabric', 'resource': '101'} + + # Configuration in query state may not include all parameters. 
So don;t try to validate + if self.module.params["state"] == "query": + return True, "" + + if res["pool_type"] == "ID": + pool_name = res["pool_name"] + elif res["pool_type"] == "IP": + pool_name = "IP_POOL" + elif res["pool_type"] == "SUBNET": + pool_name = "SUBNET" + else: + return ( + False, + "Given pool type = '" + res["pool_type"] + "' is invalid," + " Allowed pool types = ['ID', 'IP', 'SUBNET']", + ) + + if poolname_to_scope_type.get(pool_name, None) is None: + return ( + False, + "Given pool name '" + res["pool_name"] + "' is not valid", + ) + if res["scope_type"] not in poolname_to_scope_type[pool_name]: + return ( + False, + "Given scope type '" + + res["scope_type"] + + "' is not valid for pool name = '" + + res["pool_name"] + + "', Allowed scope_types = " + + str(poolname_to_scope_type[pool_name]), + ) + return True, "" + + def dcnm_rm_validate_input(self): + + """ + Routine to validate playbook input based on the state. Since each state has a different + config structure, this routine handles the validation based on the given state + + Parameters: + None + + Returns: + None + """ + + if None is self.config: + return + + cfg = [] + for item in self.config: + + if self.module.params["state"] != "query": + if item.get("scope_type", None) is None: + self.module.fail_json( + msg="Mandatory parameter 'scope_type' missing" + ) + + if item.get("pool_type", None) is None: + self.module.fail_json( + msg="Mandatory parameter 'pool_type' missing" + ) + + if item.get("pool_name", None) is None: + self.module.fail_json( + msg="Mandatory parameter 'pool_name' missing" + ) + + if item.get("entity_name", None) is None: + self.module.fail_json( + msg="Mandatory parameter 'entity_name' missing" + ) + + rc, mesg = self.dcnm_rm_check_resource_params(item) + if not rc: + self.module.fail_json(msg=mesg) + + citem = copy.deepcopy(item) + + cfg.append(citem) + + if self.module.params["state"] == "query": + # config for query state is different. 
So validate query state differently + self.dcnm_rm_validate_query_state_input(cfg) + else: + self.dcnm_rm_validate_rm_input(cfg) + cfg.remove(citem) + + def dcnm_rm_validate_rm_input(self, cfg): + + """ + Routine to validate the playbook input. This routine updates self.rm_info + with validated playbook information by defaulting values not included + + Parameters: + cfg (dict): The config from playbook + + Returns: + None + """ + + rm_spec = dict( + entity_name=dict(required=True, type="str"), + pool_type=dict(required=True, type="str"), + pool_name=dict(required=True, type="str"), + scope_type=dict(required=True, type="str"), + ) + + if cfg[0]["scope_type"] != "fabric": + rm_spec["switch"] = dict(required=True, type="list") + + if self.module.params["state"] == "merged": + if cfg[0]["pool_type"] == "ID": + rm_spec["resource"] = dict(required=True, type="int") + if cfg[0]["pool_type"] == "IP": + if isinstance( + ipaddress.ip_address(cfg[0]["resource"]), + ipaddress.IPv4Address, + ): + rm_spec["resource"] = dict(required=True, type="ipv4") + if isinstance( + ipaddress.ip_address(cfg[0]["resource"]), + ipaddress.IPv6Address, + ): + rm_spec["resource"] = dict(required=True, type="ipv6") + if cfg[0]["pool_type"] == "SUBNET": + ip_addr = cfg[0]["resource"].split("/")[0] + if isinstance( + ipaddress.ip_address(ip_addr), ipaddress.IPv4Address + ): + rm_spec["resource"] = dict( + required=True, type="ipv4_subnet" + ) + if isinstance( + ipaddress.ip_address(ip_addr), ipaddress.IPv6Address + ): + rm_spec["resource"] = dict( + required=True, type="ipv6_subnet" + ) + + self.dcnm_rm_validate_and_build_rm_info(cfg, rm_spec) + + def dcnm_rm_validate_query_state_input(self, cfg): + + """ + Playbook input will be different for differnt states. This routine validates the query state + input. This routine updates self.rm_info with validated playbook information related to query + state. 
+ + Parameters: + cfg (dict): The config from playbook + + Returns: + None + """ + + rm_spec = dict( + entity_name=dict(type="str"), + pool_name=dict(type="str"), + switch=dict(type="list"), + ) + + rm_info, invalid_params = validate_list_of_dicts(cfg, rm_spec) + if invalid_params: + mesg = "Invalid parameters in playbook: {0}".format(invalid_params) + self.module.fail_json(msg=mesg) + + if rm_info: + self.rm_info.extend(rm_info) + + def dcnm_rm_get_rm_payload(self, rm, sw): + + """ + This routine builds the complete Resource Manager payload based on the information in self.want + + Parameters: + rm (dict): Resource information + + Returns: + rm_payload (dict): Resource payload information populated with appropriate data from playbook config + """ + + scope_type_xlate = { + "fabric": "Fabric", + "device": "Device", + "device_interface": "DeviceInterface", + "device_pair": "DevicePair", + "link": "Link", + } + + rm_payload = {} + + # Populate the common information + + rm_payload["poolName"] = rm["pool_name"] + + # Scope type values from playbook must be converted to payload format. + + rm_payload["scopeType"] = scope_type_xlate[rm["scope_type"]] + rm_payload["entityName"] = rm["entity_name"] + rm_payload["resource"] = rm.get("resource", None) + + rm_payload["scopeValue"] = ( + self.fabric if rm["scope_type"] == "fabric" else self.ip_sn[sw] + ) + + return rm_payload + + def dcnm_rm_get_want(self): + + """ + This routine updates self.want with the payload information based on the playbook configuration. 
+ + Parameters: + None + + Returns: + None + """ + + if None is self.config: + return + + if not self.rm_info: + return + + # self.rm_info is a list of dictionaries each having config related to a particular resource + for rm_elem in self.rm_info: + if rm_elem.get("switch", None): + for sw in rm_elem["switch"]: + rm_payload = self.dcnm_rm_get_rm_payload(rm_elem, sw) + if rm_payload not in self.want: + self.want.append(rm_payload) + else: + rm_payload = self.dcnm_rm_get_rm_payload(rm_elem, None) + if rm_payload not in self.want: + self.want.append(rm_payload) + + def dcnm_rm_compare_entity_names(self, e1, e2): + + # Even though entity names are strings, the same can be a combination of two serial numbers in + certain cases. The order of these serial numbers may be different on the DCNM server than + what is given in the playbook. So we split the entity name, sort the same and then compare + the resulting contents + return sorted(e1.split("~")) == sorted(e2.split("~")) + + def dcnm_rm_get_rm_info_from_dcnm(self, res, res_type): + + """ + Routine to get existing Resource information from DCNM which matches the given Resource. 
+ + Parameters: + res (dict): Resource information + res_type (string): String indicating whether the 'res' passed is in 'PLAYBOOK' format + or 'PAYLOAD' format + Returns: + resp["DATA"] (dict): Resource information obtained from the DCNM server if it exists + [] otherwise + """ + + key = res["scopeValue"] + "_" + res["poolName"] + + if self.res_pools.get(key, None) is None: + if res["scopeType"] == "Fabric": + path_str = "RM_GET_RESOURCES_BY_FABRIC_AND_POOLNAME" + else: + path_str = "RM_GET_RESOURCES_BY_SNO_AND_POOLNAME" + + if res_type == "PAYLOAD": + path = self.paths[path_str].format( + res["scopeValue"], res["poolName"] + ) + else: + path = "" + + resp = dcnm_send(self.module, "GET", path) + + if resp and (resp["RETURN_CODE"] == 200) and resp["DATA"]: + self.res_pools[key] = resp["DATA"] + else: + return [] + + for relem in self.res_pools[key]: + # For switch and serial number combination, poolName will not be filled with proper value + # Since we know which pool is used in this run, fill it up here + relem["resourcePool"]["poolName"] = res["poolName"] + if self.dcnm_rm_match_resources( + relem, res, res["scopeType"].lower() + ): + return relem + else: + if self.dcnm_rm_compare_entity_names( + relem["entityName"], res["entityName"] + ): + mismatch_values = self.dcnm_rm_get_mismatched_values( + relem, res, res["scopeType"].lower() + ) + self.changed_dict[0]["debugs"].append( + { + "Entity Name": res["entityName"], + "MISMATCHED_VALUES": mismatch_values, + } + ) + return [] + + def dcnm_rm_get_have(self): + + """ + Routine to get existing resource information from DCNM that matches information in self.want. 
+ This routine updates self.have with all the resources that match the given playbook configuration + + Parameters: + None + + Returns: + None + """ + + if self.want == []: + return + + for res in self.want: + have = self.dcnm_rm_get_rm_info_from_dcnm(res, "PAYLOAD") + if (have != []) and (have not in self.have): + self.have.append(have) + + def dcnm_rm_compare_resource_values(self, r1, r2): + + """ + Routine to compare the resource values. Resource values will be different for ID, IP and SUBNET + pools. For IP and SUBNET the addresses can be included with subnet masks. This routine compares + these values appropriately + + Parameters: + r1 : First Resource value + r2 : Second Resource value + + Returns: + True - if both resource values same + False - otherwise + """ + + rv1 = [] + rv2 = [] + r1_ip4 = False + r2_ip4 = False + r1_ip6 = False + r2_ip6 = False + + if "." in r1: + r1_ip4 = True + if "." in r2: + r2_ip4 = True + + if ":" in r1: + r1_ip6 = True + if ":" in r2: + r2_ip6 = True + + if "/" in r1: + rv1 = r1.split("/") + if "/" in r1: + rv2 = r2.split("/") + + if r1_ip4 and r2_ip4: + if rv1 and not rv2: + return False + if rv2 and not rv1: + return False + if rv1 and rv2: + return ( + ipaddress.IPv4Address(rv1[0]).exploded + == ipaddress.IPv4Address(rv2[0]).exploded + ) and (rv1[1] == rv2[1]) + else: + return ( + ipaddress.IPv4Address(r1).exploded + == ipaddress.IPv4Address(r2).exploded + ) + + if r1_ip6 and r2_ip6: + if rv1 and not rv2: + return False + if rv2 and not rv1: + return False + if rv1 and rv2: + return ( + ipaddress.IPv6Address(rv1[0]).exploded + == ipaddress.IPv6Address(rv2[0]).exploded + ) and (rv1[1] == rv2[1]) + else: + return ( + ipaddress.IPv6Address(r1).exploded + == ipaddress.IPv6Address(r2).exploded + ) + + return r1 == r2 + + def dcnm_rm_compare_resources(self, res): + + """ + This routine finds a resource in self.have that matches the given resource. 
If the given + resource already exist then the resource is not added to the resource list to be created on + DCNM server in the current run. The given resource is added to the list of resources to be + created otherwise + + Parameters: + res : Resource to be matched from self.have + + Returns: + DCNM_RES_ADD - if given resource is not found + DCNM_RES_DONT_ADD - otherwise + """ + + # Comparing resources is different for resources of scopeType Fabric and others + + match_res = [] + match_res = [ + relem + for relem in self.have + if ( + self.dcnm_rm_match_resources( + relem, res, res["scopeType"].lower() + ) + ) + ] + + if match_res != []: + # Found a matching resource. Check the resource values here. If they are same then the given resource + # is identical to the existing resource. Otherwise we should add it. + if self.dcnm_rm_compare_resource_values( + str(match_res[0]["allocatedIp"]), str(res["resource"]) + ): + return "DCNM_RES_DONT_ADD" + else: + return "DCNM_RES_ADD" + else: + + return "DCNM_RES_ADD" + + def dcnm_rm_get_diff_merge(self): + + """ + Routine to populate a list of payload information in self.diff_create to create new resources. + + Parameters: + None + + Returns: + None + """ + + if not self.want: + return + + for res in self.want: + + rc = self.dcnm_rm_compare_resources(res) + + if rc == "DCNM_RES_ADD": + # Resource does not exists, create a new one. + if res not in self.diff_create: + self.changed_dict[0]["merged"].append(res) + self.diff_create.append(res) + + def dcnm_rm_get_mismatched_values(self, res1, res2, scope): + + """ + Routine to find the resource parameters that are not matching. 
Routine compares the two resources + given and populates mismatch_values with parameters that don't match + + Parameters: + res1 - First resource + res2 - Second resource + scope - scope of the resources + + Returns: + mismatch_values - a list of dicts containing mismatched values + """ + + mismatch_values = [] + + if res1["entityType"] != res2["scopeType"]: + mismatch_values.append( + { + "have_entity_type": res1["entityType"], + "want_scope_type": res2["scopeType"], + } + ) + if res1["resourcePool"]["poolName"] != res2["poolName"]: + mismatch_values.append( + { + "have_pool_name": res1["resourcePool"]["poolName"], + "want_pool_nme": res2["poolName"], + } + ) + + if scope == "fabric": + if res1["resourcePool"]["fabricName"] != self.fabric: + mismatch_values.append( + { + "have_fabric_name": res1["resourcePool"]["fabricName"], + "want_fabric_name": self.fabric, + } + ) + else: + if res1["allocatedScopeValue"] != res2["scopeValue"]: + mismatch_values.append( + { + "have_scope_value": res1["allocatedScopeValue"], + "want_scope_value": res2["scopeValue"], + } + ) + return mismatch_values + + def dcnm_rm_match_resources(self, res1, res2, scope): + + """ + Routine compares two resources based on the given scope + + Parameters: + res1 - First resource + res2 - Second resource + scope - scope of the resources + + Returns: + True - if resources match + False - otherwise + """ + + if not self.dcnm_rm_compare_entity_names( + res1["entityName"], res2["entityName"] + ): + return False + if res1["entityType"] != res2["scopeType"]: + return False + if res1["resourcePool"]["poolName"] != res2["poolName"]: + return False + + if scope == "fabric": + if res1["resourcePool"]["fabricName"] != self.fabric: + return False + else: + # For scope values of "device_pair", "link" and "device_interface", the scope value will be set + # to the first part of the entity name by DCNM even though a specific scope value is included + # in the create payload. 
So for such scope values we wil check the first part of the entity name + # also + if ( + res1["allocatedScopeValue"] != res2["scopeValue"] + and res1["allocatedScopeValue"] + != res1["entityName"].split("~")[0] + ): + return False + return True + + def dcnm_rm_get_diff_deleted(self): + + """ + Routine to get a list of payload information that will be used to delete resources. + This routine updates self.diff_delete with payloads that are used to delete resources + from the server. + + Parameters: + None + + Returns: + None + """ + + for res in self.have: + self.diff_delete.append(str(res["id"])) + if self.diff_delete: + self.changed_dict[0]["deleted"].extend(self.diff_delete) + + def dcnm_rm_get_diff_query(self): + + """ + Routine to get resource information based on the playbook configuration. + This routine updates self.result with resources requested for in the playbook if they exist on + the DCNM server. + + Parameters: + None + + Returns: + None + """ + + if self.rm_info == []: + # No config is included in input. Get all pools by Fabric + path = self.paths["RM_GET_RESOURCES_BY_FABRIC"].format(self.fabric) + + resp = dcnm_send(self.module, "GET", path) + + if resp and resp["RETURN_CODE"] == 200 and resp["DATA"]: + self.result["response"].extend(resp["DATA"]) + else: + res_pools = {} + for res in self.rm_info: + + filter_by_entity_name = False + filter_by_switch = False + path_list = [] + + # Check if entity name is included. If so filter the output by entity name + if res.get("entity_name", None) is not None: + filter_by_entity_name = True + if res.get("pool_name", None) is not None: + # Check if switch is included. 
+ if res.get("switch", None) is not None: + for sw in res["switch"]: + path_list.append( + self.paths[ + "RM_GET_RESOURCES_BY_SNO_AND_POOLNAME" + ].format(self.ip_sn[sw], res["pool_name"]) + ) + filter_by_switch = True + else: + path_list.append( + self.paths[ + "RM_GET_RESOURCES_BY_FABRIC_AND_POOLNAME" + ].format(self.fabric, res["pool_name"]) + ) + else: + path_list.append( + self.paths["RM_GET_RESOURCES_BY_FABRIC"].format( + self.fabric + ) + ) + # Check if switch is included. + if res.get("switch", None) is not None: + filter_by_switch = True + + for path in path_list: + if res_pools.get(path, None) is None: + resp = dcnm_send(self.module, "GET", path) + else: + resp = res_pools[path] + + if resp and resp["RETURN_CODE"] == 200 and resp["DATA"]: + + if res_pools.get(path, None) is None: + # Note down the resources fetched against the "path". This was we need not fetch the resources again + # if required from the same path + res_pools[path] = resp + + if ( + filter_by_entity_name is False + and filter_by_switch is False + ): + self.result["response"].extend(resp["DATA"]) + continue + + rlist = resp["DATA"] + + # Check if filters are set. If so filter the content based on the filter values + if filter_by_entity_name and filter_by_switch: + match_res = [ + relem + for relem in rlist + if ( + self.dcnm_rm_compare_entity_names( + relem["entityName"], res["entity_name"] + ) + and self.dcnm_rm_match_switch( + relem["allocatedScopeValue"], + res["switch"], + ) + ) + ] + elif filter_by_entity_name: + match_res = [ + relem + for relem in rlist + if self.dcnm_rm_compare_entity_names( + relem["entityName"], res["entity_name"] + ) + ] + elif filter_by_switch: + match_res = [ + relem + for relem in rlist + if self.dcnm_rm_match_switch( + relem["allocatedScopeValue"], res["switch"] + ) + ] + + if match_res: + self.result["response"].extend(match_res) + + def dcnm_rm_match_switch(self, sw, sw_list): + + """ + Routine to compare switch information. 
This is used to filter out resource information during query. + + Parameters: + sw - switch information included in the resource on DCNM server + sw_list - list of switches included in the resource from playbook config + + Returns: + True - if the switch information is present in the resource from DCNM + False - otherwise + """ + + for sw_elem in sw_list: + if sw == self.ip_sn[sw_elem]: + return True + return False + + def dcnm_rm_send_message_to_dcnm(self): + + """ + Routine to push payloads to DCNM server. This routine implements reqquired error checks and retry mechanisms to handle + transient errors. This routine checks self.diff_create, self.diff_delete lists and push appropriate requests to DCNM. + + Parameters: + None + + Returns: + None + """ + + resp = None + create_flag = False + delete_flag = False + + path = self.paths["RM_CREATE_RESOURCE"].format(self.fabric) + + for res in self.diff_create: + + json_payload = json.dumps(res) + resp = dcnm_send(self.module, "POST", path, json_payload) + + create_flag = True + + self.result["response"].append(resp) + if resp and resp.get("RETURN_CODE") != 200: + resp["CHANGED"] = self.changed_dict[0] + self.module.fail_json(msg=resp) + + if self.diff_delete: + path = self.paths["RM_DELETE_RESOURCE"].format(self.fabric) + + del_path = path + ",".join(self.diff_delete) + + resp = dcnm_send(self.module, "DELETE", del_path) + + delete_flag = True + + self.result["response"].append(resp) + if resp and resp.get("RETURN_CODE") != 200: + resp["CHANGED"] = self.changed_dict[0] + self.module.fail_json(msg=resp) + + self.result["changed"] = create_flag or delete_flag + + def dcnm_rm_translate_switch_info(self, config, ip_sn, hn_sn): + + """ + Routine to translate parameters in playbook if required. This routine converts the hostname information included in + playbook to actual addresses. 
+ + Parameters: + config - The resource which needs tranlation + ip_sn - IP address to serial number mappings + hn_sn - hostname to serial number mappings + + Returns: + None + """ + + if None is config: + return + + for cfg in config: + + index = 0 + + if None is cfg.get("switch", None): + continue + for sw_elem in cfg["switch"]: + addr_info = dcnm_get_ip_addr_info( + self.module, sw_elem, ip_sn, hn_sn + ) + cfg["switch"][index] = addr_info + index = index + 1 + + +def main(): + + """ main entry point for module execution + """ + element_spec = dict( + fabric=dict(required=True, type="str"), + config=dict(required=False, type="list", elements="dict"), + state=dict( + type="str", + default="merged", + choices=["merged", "deleted", "query"], + ), + ) + + module = AnsibleModule( + argument_spec=element_spec, supports_check_mode=True + ) + + dcnm_rm = DcnmResManager(module) + + dcnm_rm.result["StartTime"] = datetime.now().strftime("%H:%M:%S") + + state = module.params["state"] + + if not dcnm_rm.config: + if state == "merged" or state == "deleted": + module.fail_json( + msg="'config' element is mandatory for state '{0}', given = '{1}'".format( + state, dcnm_rm.config + ) + ) + + dcnm_rm.dcnm_rm_translate_switch_info( + dcnm_rm.config, dcnm_rm.ip_sn, dcnm_rm.hn_sn + ) + + dcnm_rm.dcnm_rm_validate_input() + + if module.params["state"] != "query": + dcnm_rm.dcnm_rm_get_want() + dcnm_rm.dcnm_rm_get_have() + + if module.params["state"] == "merged": + dcnm_rm.dcnm_rm_get_diff_merge() + + if module.params["state"] == "deleted": + dcnm_rm.dcnm_rm_get_diff_deleted() + + if module.params["state"] == "query": + dcnm_rm.dcnm_rm_get_diff_query() + + dcnm_rm.result["diff"] = dcnm_rm.changed_dict + + if dcnm_rm.diff_create or dcnm_rm.diff_delete: + dcnm_rm.result["changed"] = True + + if module.check_mode: + dcnm_rm.result["changed"] = False + dcnm_rm.result["EndTime"] = datetime.now().strftime("%H:%M:%S") + module.exit_json(**dcnm_rm.result) + + 
dcnm_rm.dcnm_rm_send_message_to_dcnm() + + module.exit_json(**dcnm_rm.result) + + +if __name__ == "__main__": + main() diff --git a/tests/integration/targets/dcnm_resource_manager/defaults/main.yaml b/tests/integration/targets/dcnm_resource_manager/defaults/main.yaml new file mode 100644 index 000000000..5f709c5aa --- /dev/null +++ b/tests/integration/targets/dcnm_resource_manager/defaults/main.yaml @@ -0,0 +1,2 @@ +--- +testcase: "*" diff --git a/tests/integration/targets/dcnm_resource_manager/meta/main.yaml b/tests/integration/targets/dcnm_resource_manager/meta/main.yaml new file mode 100644 index 000000000..32cf5dda7 --- /dev/null +++ b/tests/integration/targets/dcnm_resource_manager/meta/main.yaml @@ -0,0 +1 @@ +dependencies: [] diff --git a/tests/integration/targets/dcnm_resource_manager/tasks/dcnm.yaml b/tests/integration/targets/dcnm_resource_manager/tasks/dcnm.yaml new file mode 100644 index 000000000..881b81cb6 --- /dev/null +++ b/tests/integration/targets/dcnm_resource_manager/tasks/dcnm.yaml @@ -0,0 +1,20 @@ +--- +- name: collect dcnm test cases + find: + paths: "{{ role_path }}/tests/dcnm" + patterns: "{{ testcase }}.yaml" + connection: local + register: dcnm_cases + +- set_fact: + test_cases: + files: "{{ dcnm_cases.files }}" + +- name: set test_items + set_fact: test_items="{{ test_cases.files | map(attribute='path') | list }}" + +- name: run test cases (connection=httpapi) + include: "{{ test_case_to_run }}" + with_items: "{{ test_items }}" + loop_control: + loop_var: test_case_to_run diff --git a/tests/integration/targets/dcnm_resource_manager/tasks/main.yaml b/tests/integration/targets/dcnm_resource_manager/tasks/main.yaml new file mode 100644 index 000000000..78c5fb834 --- /dev/null +++ b/tests/integration/targets/dcnm_resource_manager/tasks/main.yaml @@ -0,0 +1,2 @@ +--- +- { include: dcnm.yaml, tags: ['dcnm'] } \ No newline at end of file diff --git a/tests/integration/targets/dcnm_resource_manager/tests/dcnm/dcnm_res_manager_delete.yaml 
b/tests/integration/targets/dcnm_resource_manager/tests/dcnm/dcnm_res_manager_delete.yaml new file mode 100644 index 000000000..5a3d36757 --- /dev/null +++ b/tests/integration/targets/dcnm_resource_manager/tests/dcnm/dcnm_res_manager_delete.yaml @@ -0,0 +1,270 @@ +############################################## +## SETUP ## +############################################## + +- name: Remove local log file + local_action: command rm -f res_mgr.log + +- name: Delete Resources + cisco.dcnm.dcnm_resource_manager: &rm_delete + state: deleted # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + + - entity_name: "l2_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "L2_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + + - entity_name: "loopback_dev" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + scope_type: "device" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + + - entity_name: "{{ ansible_sno_1 }}~{{ ansible_sno_2 }}" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + scope_type: "device_pair" # choose from ['fabric', 'device', device_interface', 
'device_pair', 'link'] + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + + - entity_name: "mmudigon-2" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK0_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_10 }}" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK1_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "device_interface" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_3 }}~{{ ansible_sno_2 }}~{{ intf_1_3 }}" # A unique name to identify the resource + pool_type: "SUBNET" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "SUBNET" # Based on the 'poolType', select appropriate name + scope_type: "link" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + register: result + +- assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + +- block: + +############################################## +## MERGE ## +############################################## + + - name: Create Resources + cisco.dcnm.dcnm_resource_manager: &rm_merge + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "L3_VNI" # Based on the 'poolType', select 
appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "101" # The value of the resource being created + + - entity_name: "l2_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "L2_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "102" # The value of the resource being created + + - entity_name: "loopback_dev" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + scope_type: "device" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch2 }}" # provide the switch information to which the given resource is to be attached + resource: "200" # The value of the resource being created + + - entity_name: "{{ ansible_sno_1 }}~{{ ansible_sno_2 }}" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + scope_type: "device_pair" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch2 }}" # provide the switch information to which the given resource is to be attached + resource: "500" # The value of the resource being created + + - entity_name: "mmudigon-2" # A unique name to identify the resource + pool_type: 
"IP" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK0_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "110.1.1.1" # The value of the resource being created + + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_10 }}" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK1_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "device_interface" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" # provide the switch information to which the given resource is to be attached + resource: "fe:80::04" # The value of the resource being created + + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_3 }}~{{ ansible_sno_2 }}~{{ intf_1_3 }}" # A unique name to identify the resource + pool_type: "SUBNET" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "SUBNET" # Based on the 'poolType', select appropriate name + scope_type: "link" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + resource: "fe:80:0505::05/64" # The value of the resource being created + register: result + + - assert: + that: + - 'result.changed == true' + - '(result["diff"][0]["merged"] | length) == 9' + - '(result["diff"][0]["deleted"] | length) == 0' + - '(result["response"] | length) == 9' + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + +############################################## +## DELETE ## +############################################## + + - name: Delete Resources + cisco.dcnm.dcnm_resource_manager: *rm_delete + register: result + + - assert: + that: + - 
'result.changed == true' + - '(result["diff"][0]["merged"] | length) == 0' + - '(result["diff"][0]["deleted"] | length) == 9' + - '(result["response"][0]["DATA"]["successList"] | length) == 9' + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + +############################################## +## IDEMPOTENCE ## +############################################## + + - name: Delete Resources - Idempotence + cisco.dcnm.dcnm_resource_manager: *rm_delete + register: result + + - assert: + that: + - 'result.changed == false' + - '(result["diff"][0]["merged"] | length) == 0' + - '(result["diff"][0]["deleted"] | length) == 0' + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + +############################################## +## MISSING PARAMS IN DELETE ## +############################################## + + - name: Delete Resources - scope_type missing + cisco.dcnm.dcnm_resource_manager: + state: deleted # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + register: result + ignore_errors: yes + + - assert: + that: + - 'result["msg"] == "Mandatory parameter ''scope_type'' missing"' + +############################################## +## MISSING PARAMS IN DELETE ## +############################################## + + - name: Delete Resources - pool_type missing + cisco.dcnm.dcnm_resource_manager: + state: deleted # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - scope_type: "fabric" + register: result + ignore_errors: yes + + - assert: + that: + - 'result["msg"] == "Mandatory parameter ''pool_type'' missing"' + + ############################################## +## MISSING PARAMS IN DELETE ## +############################################## + + - name: Delete Resources - entity_name missing + cisco.dcnm.dcnm_resource_manager: + state: deleted # choose form [merged, 
deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + scope_type: "fabric" + register: result + ignore_errors: yes + + - assert: + that: + - 'result["msg"] == "Mandatory parameter ''entity_name'' missing"' + +############################################## +## MISSING PARAMS IN DELETE ## +############################################## + + - name: Delete Resources - pool_name missing + cisco.dcnm.dcnm_resource_manager: + state: deleted # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + scope_type: "fabric" + register: result + ignore_errors: yes + + - assert: + that: + - 'result["msg"] == "Mandatory parameter ''pool_name'' missing"' + +############################################## +## MISSING PARAMS IN DELETE ## +############################################## + + - name: Delete Resources - switch info missing + cisco.dcnm.dcnm_resource_manager: + state: deleted # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "{{ ansible_sno_1 }}~{{ ansible_sno_2 }}" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + scope_type: "device_pair" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + + register: result + ignore_errors: yes + + - assert: + that: + - '"switch : Required parameter not found" in result["msg"]' diff --git a/tests/integration/targets/dcnm_resource_manager/tests/dcnm/dcnm_res_manager_invalid_params.yaml b/tests/integration/targets/dcnm_resource_manager/tests/dcnm/dcnm_res_manager_invalid_params.yaml new file mode 100644 index 000000000..fa4fa65ee --- /dev/null +++ 
b/tests/integration/targets/dcnm_resource_manager/tests/dcnm/dcnm_res_manager_invalid_params.yaml @@ -0,0 +1,130 @@ +############################################## +## SETUP ## +############################################## + +- name: Remove local log file + local_action: command rm -f res_mgr.log + +- block: + +############################################## +## MERGE ## +############################################## + + - name: Create Resources - Invalid Pool type + cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + pool_type: "IDLE" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "101" # The value of the resource being created + ignore_errors: yes + + - name: Create Resources - Invalid Pool Name + cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "WRONG_POOL" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "101" # The value of the resource being created + ignore_errors: yes + + - name: Create Resources - L3 VNI wrong scope type + cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + scope_type: "device" # choose from 
['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "102" # The value of the resource being created + ignore_errors: yes + + - name: Create Resources - L2VNI - wrong scope type + cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "l2_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "L2_VNI" # Based on the 'poolType', select appropriate name + scope_type: "device_interface" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "102" # The value of the resource being created + ignore_errors: yes + + - name: Create Resources - LOOPBACK_ID wrong scope type + cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "loopback_dev" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + scope_type: "link" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + resource: "200" # The value of the resource being created + ignore_errors: yes + + - name: Create Resources - VPC_ID wrong scope type + cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "{{ ansible_sno_1 }}~{{ ansible_sno_2 }}" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide 
the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + resource: "500" # The value of the resource being created + ignore_errors: yes + + - name: Create Resources - LOOPBACK0_IP_POOL wrong scope type + cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "mmudigon-2" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK0_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "link" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "110.1.1.1" # The value of the resource being created + ignore_errors: yes + + - name: Create Resources - LOOPBACK1_IP_POOL wrong scope type + cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_10 }}" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK1_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "device" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + resource: "fe:80::04" # The value of the resource being created + ignore_errors: yes + + - name: Create Resources - SUBNET wrong scope type + cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_3 }}~{{ ansible_sno_2 }}~{{ intf_1_3 }}" # A unique name to identify the resource + pool_type: "SUBNET" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "SUBNET" # Based on the 'poolType', select appropriate name 
+ scope_type: "device_interface" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + resource: "fe:80:0505::05/64" # The value of the resource being created + ignore_errors: yes diff --git a/tests/integration/targets/dcnm_resource_manager/tests/dcnm/dcnm_res_manager_merge.yaml b/tests/integration/targets/dcnm_resource_manager/tests/dcnm/dcnm_res_manager_merge.yaml new file mode 100644 index 000000000..daf3ded9e --- /dev/null +++ b/tests/integration/targets/dcnm_resource_manager/tests/dcnm/dcnm_res_manager_merge.yaml @@ -0,0 +1,373 @@ +############################################## +## SETUP ## +############################################## + +- name: Remove local log file + local_action: command rm -f res_mgr.log + +- name: Delete Resources + cisco.dcnm.dcnm_resource_manager: &rm_delete + state: deleted # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + + - entity_name: "l2_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "L2_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + + - entity_name: "loopback_dev" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + scope_type: "device" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch 
information to which the given resource is attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + + - entity_name: "{{ ansible_sno_1 }}~{{ ansible_sno_2 }}" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + scope_type: "device_pair" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + + - entity_name: "mmudigon-2" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK0_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_10 }}" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK1_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "device_interface" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_3 }}~{{ ansible_sno_2 }}~{{ intf_1_3 }}" # A unique name to identify the resource + pool_type: "SUBNET" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "SUBNET" # Based on the 'poolType', select appropriate name + scope_type: "link" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + register: result + +- assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + +- block: + +############################################## +## 
MERGE ## +############################################## + + - name: Create Resources + cisco.dcnm.dcnm_resource_manager: &rm_merge + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "101" # The value of the resource being created + + - entity_name: "l2_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "L2_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "102" # The value of the resource being created + + - entity_name: "loopback_dev" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + scope_type: "device" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + resource: "200" # The value of the resource being created + + - entity_name: "{{ ansible_sno_1 }}~{{ ansible_sno_2 }}" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + scope_type: "device_pair" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + resource: "500" # The value of the resource 
being created + + - entity_name: "mmudigon-2" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK0_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "110.1.1.1" # The value of the resource being created + + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_10 }}" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK1_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "device_interface" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + resource: "fe:80::04" # The value of the resource being created + + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_3 }}~{{ ansible_sno_2 }}~{{ intf_1_3 }}" # A unique name to identify the resource + pool_type: "SUBNET" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "SUBNET" # Based on the 'poolType', select appropriate name + scope_type: "link" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + resource: "fe:80:0505::05/64" # The value of the resource being created + register: result + + - assert: + that: + - 'result.changed == true' + - '(result["diff"][0]["merged"] | length) == 9' + - '(result["diff"][0]["deleted"] | length) == 0' + - '(result["response"] | length) == 9' + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + + +############################################## +## IDEMPOTENCE ## +############################################## + + - name: Create Resources - Idempotence + cisco.dcnm.dcnm_resource_manager: *rm_merge + register: 
result + + - assert: + that: + - 'result.changed == false' + - '(result["diff"][0]["merged"] | length) == 0' + - '(result["diff"][0]["deleted"] | length) == 0' + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + +############################################## +## MODIFY EXISTING RESOURCES ## +############################################## + + - name: Modify Resources + cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "1001" # The value of the resource being modified + + - entity_name: "l2_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "L2_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "1002" # The value of the resource being modified + + - entity_name: "loopback_dev" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + scope_type: "device" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + resource: "1003" # The value of the resource being modified + + - entity_name: "{{ ansible_sno_1 }}~{{ ansible_sno_2 }}" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "VPC_ID" # Based on the 'poolType', 
select appropriate name + scope_type: "device_pair" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + resource: "1005" # The value of the resource being modified + + - entity_name: "mmudigon-2" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK0_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "111.1.1.1" # The value of the resource being modified + + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_10 }}" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK1_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "device_interface" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + resource: "fe:81::04" # The value of the resource being modified + + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_3 }}~{{ ansible_sno_2 }}~{{ intf_1_3 }}" # A unique name to identify the resource + pool_type: "SUBNET" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "SUBNET" # Based on the 'poolType', select appropriate name + scope_type: "link" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + resource: "fe:81:0505::05/64" # The value of the resource being modified + register: result + + - assert: + that: + - 'result.changed == true' + - '(result["diff"][0]["merged"] | length) == 9' + - '(result["diff"][0]["deleted"] | length) == 0' + - '(result["response"] | 
length) == 9' + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + +############################################## +## MERGE - MISSING PARAMS ## +############################################## + + - name: Create Resources - Missing switch info + cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "loopback_dev" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + scope_type: "device" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "200" # The value of the resource being created + register: result + ignore_errors: yes + + - assert: + that: + - '"switch : Required parameter not found" in result["msg"]' + + - name: Create Resources - Missing scope_type + cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "loopback_dev" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + resource: "200" # The value of the resource being created + register: result + ignore_errors: yes + + - assert: + that: + - 'result["msg"] == "Mandatory parameter ''scope_type'' missing"' + + - name: Create Resources - Missing pool_type + cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "loopback_dev" # A unique name to identify the resource + scope_type: "device" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + 
pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + resource: "200" # The value of the resource being created + register: result + ignore_errors: yes + + - assert: + that: + - 'result["msg"] == "Mandatory parameter ''pool_type'' missing"' + + - name: Create Resources - Missing pool_name + cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "loopback_dev" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + scope_type: "device" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + resource: "200" # The value of the resource being created + register: result + ignore_errors: yes + + - assert: + that: + - 'result["msg"] == "Mandatory parameter ''pool_name'' missing"' + + - name: Create Resources - Missing entity_name + cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + scope_type: "device" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + resource: "200" # The value of the resource being created + register: result + ignore_errors: yes + + - assert: + that: + - 'result["msg"] == "Mandatory parameter ''entity_name'' missing"' + + - name: Create Resources - Missing resource value + 
cisco.dcnm.dcnm_resource_manager: + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "loopback_dev" # A unique name to identify the resource + pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + scope_type: "device" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + register: result + ignore_errors: yes + + - assert: + that: + - '"resource : Required parameter not found" in result["msg"]' + +############################################## +## CLEANUP ## +############################################## + + always: + + - name: Delete Resources + cisco.dcnm.dcnm_resource_manager: *rm_delete + register: result + when: IT_CONTEXT is not defined + + - assert: + that: + - 'result.changed == true' + - '(result["diff"][0]["merged"] | length) == 0' + - '(result["diff"][0]["deleted"] | length) == 9' + - '(result["response"][0]["DATA"]["successList"] | length) == 9' + when: IT_CONTEXT is not defined + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + when: IT_CONTEXT is not defined diff --git a/tests/integration/targets/dcnm_resource_manager/tests/dcnm/dcnm_res_manager_query.yaml b/tests/integration/targets/dcnm_resource_manager/tests/dcnm/dcnm_res_manager_query.yaml new file mode 100644 index 000000000..8591b2646 --- /dev/null +++ b/tests/integration/targets/dcnm_resource_manager/tests/dcnm/dcnm_res_manager_query.yaml @@ -0,0 +1,331 @@ +############################################## +## SETUP ## +############################################## + +- name: Remove local log file + local_action: command rm -f res_mgr.log + +- name: Delete Resources + cisco.dcnm.dcnm_resource_manager: &rm_delete + state: deleted # choose form 
[merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + + - entity_name: "l2_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "L2_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + + - entity_name: "loopback_dev" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + scope_type: "device" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + + - entity_name: "{{ ansible_sno_1 }}~{{ ansible_sno_2 }}" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + scope_type: "device_pair" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + + - entity_name: "mmudigon-2" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK0_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_10 }}" # A unique name to identify the 
resource + pool_type: "IP" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK1_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "device_interface" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_3 }}~{{ ansible_sno_2 }}~{{ intf_1_3 }}" # A unique name to identify the resource + pool_type: "SUBNET" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "SUBNET" # Based on the 'poolType', select appropriate name + scope_type: "link" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + register: result + +- assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + +- block: + +############################################## +## MERGE ## +############################################## + + - name: Create Resources + cisco.dcnm.dcnm_resource_manager: &rm_merge + state: merged # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "101" # The value of the resource being created + + - entity_name: "l2_vni_fabric" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "L2_VNI" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "102" # The value of the resource being created + + - entity_name: 
"loopback_dev" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + scope_type: "device" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + resource: "200" # The value of the resource being created + + - entity_name: "{{ ansible_sno_1 }}~{{ ansible_sno_2 }}" # A unique name to identify the resource + pool_type: "ID" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + scope_type: "device_pair" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + - "{{ ansible_switch2 }}" + resource: "500" # The value of the resource being created + + - entity_name: "mmudigon-2" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK0_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "fabric" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + resource: "110.1.1.1" # The value of the resource being created + + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_10 }}" # A unique name to identify the resource + pool_type: "IP" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "LOOPBACK1_IP_POOL" # Based on the 'poolType', select appropriate name + scope_type: "device_interface" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + resource: "fe:80::04" # The value of the resource being created + + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_3 }}~{{ 
ansible_sno_2 }}~{{ intf_1_3 }}" # A unique name to identify the resource + pool_type: "SUBNET" # choose from ['ID', 'IP', 'SUBNET'] + pool_name: "SUBNET" # Based on the 'poolType', select appropriate name + scope_type: "link" # choose from ['fabric', 'device', device_interface', 'device_pair', 'link'] + switch: # provide the switch information to which the given resource is to be attached + - "{{ ansible_switch1 }}" + resource: "fe:80:0505::05/64" # The value of the resource being created + register: result + + - assert: + that: + - 'result.changed == true' + - '(result["diff"][0]["merged"] | length) == 9' + - '(result["diff"][0]["deleted"] | length) == 0' + - '(result["response"] | length) == 9' + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + +############################################## +## QUERY ## +############################################## + + - name: Query all Resources - no filters + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + register: result + + - assert: + that: + - '(result["response"] | length) != 0' + + - name: Creating entity_names + set_fact: + entity_names: + - "l3_vni_fabric" + - "l2_vni_fabric" + - "loopback_dev" + - "{{ ansible_sno_1 }}~{{ ansible_sno_2 }}" + - "{{ ansible_sno_2 }}~{{ ansible_sno_1 }}" + - "{{ ansible_sno_1 }}~{{ intf_1_10 }}" + - "{{ ansible_sno_1 }}~{{ intf_1_3 }}~{{ ansible_sno_2 }}~{{ intf_1_3 }}" + - "{{ ansible_sno_2 }}~{{ intf_1_3 }}~{{ ansible_sno_1 }}~{{ intf_1_3 }}" + - "mmudigon-2" + + - name: Query Resources - filter by entity name + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "l3_vni_fabric" # A unique name to identify the resource + - entity_name: "l2_vni_fabric" # A unique name to identify the resource + - entity_name: "loopback_dev" # A unique name to identify the resource + - 
entity_name: "{{ ansible_sno_1 }}~{{ ansible_sno_2 }}" # A unique name to identify the resource + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_10 }}" # A unique name to identify the resource + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_2 }}~{{ ansible_sno_2 }}~{{ intf_1_2 }}" # A unique name to identify the resource + - entity_name: "mmudigon-2" # A unique name to identify the resource + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_2 }}" # A unique name to identify the resource + - entity_name: "{{ ansible_sno_1 }}~{{ intf_1_3 }}~{{ ansible_sno_2 }}~{{ intf_1_3 }}" # A unique name to identify the resource + + register: result + + - assert: + that: + - 'item["entityName"] in {{ entity_names }}' + loop: '{{ result.response }}' + + - name: Query Resources - filter by switch 1 + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + register: result + + - assert: + that: + - 'item["ipAddress"] == "{{ ansible_switch1 }}"' + loop: '{{ result.response }}' + + - name: Query Resources - filter by switch 2 + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch2 }}" + register: result + + - assert: + that: + - 'item["ipAddress"] == "{{ ansible_switch2 }}"' + loop: '{{ result.response }}' + + - name: Query Resources - filter by fabric and pool name + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - pool_name: "BGP_ASN_ID" # Based on the 'poolType', select appropriate name + - pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + - pool_name: "L2_VNI" # Based on the 'poolType', select 
appropriate name + - pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + - pool_name: "VPC_PEER_LINK_VLAN" # Based on the 'poolType', select appropriate name + - pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + - pool_name: "LOOPBACK0_IP_POOL" # Based on the 'poolType', select appropriate name + - pool_name: "LOOPBACK1_IP_POOL" # Based on the 'poolType', select appropriate name + - pool_name: "SUBNET" # Based on the 'poolType', select appropriate name + register: result + + - assert: + that: + - 'item["resourcePool"]["fabricName"] == "{{ ansible_it_fabric }}"' + - 'item["resourcePool"]["poolName"] in [ + "BGP_ASN_ID", + "L3_VNI", + "L2_VNI", + "LOOPBACK_ID", + "VPC_PEER_LINK_VLAN", + "VPC_ID", + "LOOPBACK0_IP_POOL", + "LOOPBACK1_IP_POOL", + "SUBNET" + ]' + loop: '{{ result.response }}' + + - name: Query Resources - filter by switch and pool name + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - pool_name: "BGP_ASN_ID" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + - pool_name: "L3_VNI" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + - pool_name: "L2_VNI" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + - pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + - pool_name: "VPC_PEER_LINK_VLAN" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + - 
pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + - pool_name: "LOOPBACK0_IP_POOL" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + - pool_name: "LOOPBACK1_IP_POOL" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + - pool_name: "SUBNET" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + register: result + + - assert: + that: + - 'item["ipAddress"] == "{{ ansible_switch1 }}"' + loop: '{{ result.response }}' + + - name: Query Resources - mixed query + cisco.dcnm.dcnm_resource_manager: + state: query # choose form [merged, deleted, query] + fabric: "{{ ansible_it_fabric }}" + config: + - entity_name: "l2_vni_fabric" # A unique name to identify the resource + - switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + - switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch2 }}" + - pool_name: "LOOPBACK_ID" # Based on the 'poolType', select appropriate name + - pool_name: "VPC_ID" # Based on the 'poolType', select appropriate name + switch: # provide the switch information to which the given resource is attached + - "{{ ansible_switch1 }}" + register: result + + - assert: + that: + - '(result["response"] | length) != 0' + +############################################## +## CLEANUP ## +############################################## + + always: + + - name: Delete Resources + cisco.dcnm.dcnm_resource_manager: *rm_delete + register: result + when: IT_CONTEXT is not defined + + - assert: + that: + - 
'result.changed == true' + - '(result["diff"][0]["merged"] | length) == 0' + - '(result["diff"][0]["deleted"] | length) == 9' + - '(result["response"][0]["DATA"]["successList"] | length) == 9' + when: IT_CONTEXT is not defined + + - assert: + that: + - 'item["RETURN_CODE"] == 200' + loop: '{{ result.response }}' + when: IT_CONTEXT is not defined diff --git a/tests/sanity/ignore-2.10.txt b/tests/sanity/ignore-2.10.txt index d86f3a0a9..0963270f6 100644 --- a/tests/sanity/ignore-2.10.txt +++ b/tests/sanity/ignore-2.10.txt @@ -8,5 +8,6 @@ plugins/modules/dcnm_service_node.py validate-modules:missing-gplv3-license # GP plugins/modules/dcnm_template.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module plugins/modules/dcnm_service_route_peering.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module plugins/modules/dcnm_service_policy.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module +plugins/modules/dcnm_resource_manager.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module plugins/modules/dcnm_rest.py import-2.6!skip -plugins/modules/dcnm_rest.py import-2.7!skip \ No newline at end of file +plugins/modules/dcnm_rest.py import-2.7!skip diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt index 94d404ba2..0513b0c04 100644 --- a/tests/sanity/ignore-2.11.txt +++ b/tests/sanity/ignore-2.11.txt @@ -8,6 +8,7 @@ plugins/modules/dcnm_service_node.py validate-modules:missing-gplv3-license # GP plugins/modules/dcnm_template.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module plugins/modules/dcnm_service_route_peering.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module plugins/modules/dcnm_service_policy.py 
validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module +plugins/modules/dcnm_resource_manager.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module plugins/modules/dcnm_rest.py import-2.6!skip plugins/modules/dcnm_rest.py import-2.7!skip plugins/httpapi/dcnm.py import-2.7!skip @@ -15,4 +16,4 @@ plugins/httpapi/dcnm.py import-3.5!skip plugins/httpapi/dcnm.py import-3.6!skip plugins/httpapi/dcnm.py import-3.7!skip plugins/httpapi/dcnm.py import-3.8!skip -plugins/httpapi/dcnm.py import-3.9!skip \ No newline at end of file +plugins/httpapi/dcnm.py import-3.9!skip diff --git a/tests/sanity/ignore-2.12.txt b/tests/sanity/ignore-2.12.txt index f54169225..2a623b353 100644 --- a/tests/sanity/ignore-2.12.txt +++ b/tests/sanity/ignore-2.12.txt @@ -8,8 +8,9 @@ plugins/modules/dcnm_service_node.py validate-modules:missing-gplv3-license # GP plugins/modules/dcnm_template.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module plugins/modules/dcnm_service_route_peering.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module plugins/modules/dcnm_service_policy.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module +plugins/modules/dcnm_resource_manager.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module plugins/modules/dcnm_rest.py import-2.6!skip plugins/modules/dcnm_rest.py import-2.7!skip plugins/httpapi/dcnm.py import-3.8!skip plugins/httpapi/dcnm.py import-3.9!skip -plugins/httpapi/dcnm.py import-3.10!skip \ No newline at end of file +plugins/httpapi/dcnm.py import-3.10!skip diff --git a/tests/sanity/ignore-2.9.txt b/tests/sanity/ignore-2.9.txt index d86f3a0a9..0963270f6 100644 --- a/tests/sanity/ignore-2.9.txt +++ b/tests/sanity/ignore-2.9.txt @@ 
-8,5 +8,6 @@ plugins/modules/dcnm_service_node.py validate-modules:missing-gplv3-license # GP plugins/modules/dcnm_template.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module plugins/modules/dcnm_service_route_peering.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module plugins/modules/dcnm_service_policy.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module +plugins/modules/dcnm_resource_manager.py validate-modules:missing-gplv3-license # GPLv3 license header not found in the first 20 lines of the module plugins/modules/dcnm_rest.py import-2.6!skip -plugins/modules/dcnm_rest.py import-2.7!skip \ No newline at end of file +plugins/modules/dcnm_rest.py import-2.7!skip diff --git a/tests/unit/modules/dcnm/fixtures/dcnm_res_manager_configs.json b/tests/unit/modules/dcnm/fixtures/dcnm_res_manager_configs.json new file mode 100644 index 000000000..c86121293 --- /dev/null +++ b/tests/unit/modules/dcnm/fixtures/dcnm_res_manager_configs.json @@ -0,0 +1,431 @@ +{ + "mock_fab_inv_data": { + "192.168.123.150": { + "isVpcConfigured": "True", + "vpcDomain": 1 + }, + "192.168.123.151": { + "isVpcConfigured": "True", + "vpcDomain": 1 + } + }, + + "mock_ip_sn" : { + "192.168.123.150": "9M99N34RDED", + "192.168.123.151": "9NXHSNTEO6C" + }, + + "create_rm_config": [ + { + "entity_name": "l3_vni_fabric", + "pool_name": "L3_VNI", + "pool_type": "ID", + "resource": "101", + "scope_type": "fabric" + }, + { + "entity_name": "l2_vni_fabric", + "pool_name": "L2_VNI", + "pool_type": "ID", + "resource": "102", + "scope_type": "fabric" + }, + { + "entity_name": "loopback_dev", + "pool_name": "LOOPBACK_ID", + "pool_type": "ID", + "resource": "200", + "scope_type": "device", + "switch": [ + "192.168.123.150", + "192.168.123.151" + ] + }, + { + "entity_name": "9M99N34RDED~9NXHSNTEO6C", + "pool_name": "VPC_ID", + 
"pool_type": "ID", + "resource": "500", + "scope_type": "device_pair", + "switch": [ + "192.168.123.150", + "192.168.123.151" + ] + }, + { + "entity_name": "mmudigon-2", + "pool_name": "LOOPBACK0_IP_POOL", + "pool_type": "IP", + "resource": "110.1.1.1", + "scope_type": "fabric" + }, + { + "entity_name": "9M99N34RDED~Ethernet1/10", + "pool_name": "LOOPBACK1_IP_POOL", + "pool_type": "IP", + "resource": "fe:80::04", + "scope_type": "device_interface", + "switch": [ + "192.168.123.150" + ] + }, + { + "entity_name": "9M99N34RDED~Ethernet1/3~9NXHSNTEO6C~Ethernet1/3", + "pool_name": "SUBNET", + "pool_type": "SUBNET", + "resource": "fe:80:0505::05/64", + "scope_type": "link", + "switch": [ + "192.168.123.150" + ] + }], + + "modify_rm_config": [ + { + "entity_name": "l3_vni_fabric", + "pool_name": "L3_VNI", + "pool_type": "ID", + "resource": "1001", + "scope_type": "fabric" + }, + { + "entity_name": "l2_vni_fabric", + "pool_name": "L2_VNI", + "pool_type": "ID", + "resource": "1002", + "scope_type": "fabric" + }, + { + "entity_name": "loopback_dev", + "pool_name": "LOOPBACK_ID", + "pool_type": "ID", + "resource": "1003", + "scope_type": "device", + "switch": [ + "192.168.123.150", + "192.168.123.151" + ] + }, + { + "entity_name": "9M99N34RDED~9NXHSNTEO6C", + "pool_name": "VPC_ID", + "pool_type": "ID", + "resource": "1005", + "scope_type": "device_pair", + "switch": [ + "192.168.123.150", + "192.168.123.151" + ] + }, + { + "entity_name": "mmudigon-2", + "pool_name": "LOOPBACK0_IP_POOL", + "pool_type": "IP", + "resource": "111.1.1.1", + "scope_type": "fabric" + }, + { + "entity_name": "9M99N34RDED~Ethernet1/10", + "pool_name": "LOOPBACK1_IP_POOL", + "pool_type": "IP", + "resource": "fe:81::04", + "scope_type": "device_interface", + "switch": [ + "192.168.123.150" + ] + }, + { + "entity_name": "9M99N34RDED~Ethernet1/3~9NXHSNTEO6C~Ethernet1/3", + "pool_name": "SUBNET", + "pool_type": "SUBNET", + "resource": "fe:81:0505::05/64", + "scope_type": "link", + "switch": [ + 
"192.168.123.150" + ] + }], + + "delete_rm_config": [ + { + "entity_name": "l3_vni_fabric", + "pool_name": "L3_VNI", + "pool_type": "ID", + "scope_type": "fabric" + }, + { + "entity_name": "l2_vni_fabric", + "pool_name": "L2_VNI", + "pool_type": "ID", + "scope_type": "fabric" + }, + { + "entity_name": "loopback_dev", + "pool_name": "LOOPBACK_ID", + "pool_type": "ID", + "scope_type": "device", + "switch": [ + "192.168.123.150", + "192.168.123.151" + ] + }, + { + "entity_name": "9M99N34RDED~9NXHSNTEO6C", + "pool_name": "VPC_ID", + "pool_type": "ID", + "scope_type": "device_pair", + "switch": [ + "192.168.123.150", + "192.168.123.151" + ] + }, + { + "entity_name": "mmudigon-2", + "pool_name": "LOOPBACK0_IP_POOL", + "pool_type": "IP", + "scope_type": "fabric" + }, + { + "entity_name": "9M99N34RDED~Ethernet1/10", + "pool_name": "LOOPBACK1_IP_POOL", + "pool_type": "IP", + "scope_type": "device_interface", + "switch": [ + "192.168.123.150" + ] + }, + { + "entity_name": "9M99N34RDED~Ethernet1/3~9NXHSNTEO6C~Ethernet1/3", + "pool_name": "SUBNET", + "pool_type": "SUBNET", + "scope_type": "link", + "switch": [ + "192.168.123.150" + ] + }], + + "query_rm_with_non_exist_entity_name_config": [ + { + "entity_name": "non_exist_en_1" + }, + { + "entity_name": "non_exist_en_2" + }, + { + "entity_name": "non_exist_en_3" + }], + + "query_rm_with_entity_name_config": [ + { + "entity_name": "bgp_asn_id_fabric" + }, + { + "entity_name": "l3_vni_fabric" + }, + { + "entity_name": "l2_vni_fabric" + }, + { + "entity_name": "loopback_dev" + }, + { + "entity_name": "9M99N34RDED~9NXHSNTEO6C" + }, + { + "entity_name": "9M99N34RDED~Ethernet1/10" + }, + { + "entity_name": "9M99N34RDED~Ethernet1/2~9NXHSNTEO6C~Ethernet1/2" + }, + { + "entity_name": "mmudigon-2" + }, + { + "entity_name": "9M99N34RDED~Ethernet1/20" + }, + { + "entity_name": "9M99N34RDED~Ethernet1/3~9NXHSNTEO6C~Ethernet1/3" + } + ], + + "query_rm_with_switch_config": [ + { + "switch": [ + "192.168.123.150" + ] + } + ], + + 
"query_rm_with_poolname_config": [ + { + "pool_name": "BGP_ASN_ID" + }, + { + "pool_name": "L3_VNI" + }, + { + "pool_name": "L2_VNI" + }, + { + "pool_name": "LOOPBACK_ID" + }, + { + "pool_name": "VPC_PEER_LINK_VLAN" + }, + { + "pool_name": "VPC_ID" + }, + { + "pool_name": "LOOPBACK0_IP_POOL" + }, + { + "pool_name": "LOOPBACK1_IP_POOL" + }, + { + "pool_name": "SUBNET" + } + ], + + "query_rm_with_poolname_and_switch_config": [ + { + "pool_name": "BGP_ASN_ID", + "switch": [ + "192.168.123.150" + ] + }, + { + "pool_name": "L3_VNI", + "switch": [ + "192.168.123.150" + ] + }, + { + "pool_name": "L2_VNI", + "switch": [ + "192.168.123.150" + ] + }, + { + "pool_name": "LOOPBACK_ID", + "switch": [ + "192.168.123.150" + ] + }, + { + "pool_name": "VPC_PEER_LINK_VLAN", + "switch": [ + "192.168.123.150" + ] + }, + { + "pool_name": "VPC_ID", + "switch": [ + "192.168.123.150" + ] + }, + { + "pool_name": "LOOPBACK0_IP_POOL", + "switch": [ + "192.168.123.150" + ] + }, + { + "pool_name": "LOOPBACK1_IP_POOL", + "switch": [ + "192.168.123.150" + ] + }, + { + "pool_name": "SUBNET", + "switch": [ + "192.168.123.150" + ] + } + ], + + "query_rm_with_mixed_config": [ + { + "entity_name": "l2_vni_fabric" + }, + { + "switch": [ + "192.168.123.150" + ] + }, + { + "switch": [ + "192.168.123.151" + ] + }, + { + "pool_name": "LOOPBACK_ID" + }, + { + "pool_name": "VPC_ID", + "switch": [ + "192.168.123.150" + ] + }], + + "create_rm_inv_l2vni_config": [ + { + "entity_name": "l2_vni_fabric", + "pool_name": "L2_VNI", + "pool_type": "ID", + "resource": "102", + "scope_type": "link" + }], + "create_rm_inv_ldev_config": [ + { + "entity_name": "loopback_dev", + "pool_name": "LOOPBACK_ID", + "pool_type": "ID", + "resource": "200", + "scope_type": "device_pair", + "switch": [ + "192.168.123.150", + "192.168.123.151" + ] + }], + "create_rm_inv_vpcid_config": [ + { + "entity_name": "9M99N34RDED~9NXHSNTEO6C", + "pool_name": "VPC_ID", + "pool_type": "ID", + "resource": "500", + "scope_type": "device_interface", 
+ "switch": [ + "192.168.123.150", + "192.168.123.151" + ] + }], + "create_rm_inv_lip0_config": [ + { + "entity_name": "mmudigon-2", + "pool_name": "LOOPBACK0_IP_POOL", + "pool_type": "IP", + "resource": "110.1.1.1", + "scope_type": "device" + }], + "create_rm_inv_lip1_config": [ + { + "entity_name": "9M99N34RDED~Ethernet1/10", + "pool_name": "LOOPBACK1_IP_POOL", + "pool_type": "IP", + "resource": "fe:80::04", + "scope_type": "link", + "switch": [ + "192.168.123.150" + ] + }], + "create_rm_inv_subnet_config": [ + { + "entity_name": "9M99N34RDED~Ethernet1/3~9NXHSNTEO6C~Ethernet1/3", + "pool_name": "SUBNET", + "pool_type": "SUBNET", + "resource": "fe:80:0505::05/64", + "scope_type": "fabric", + "switch": [ + "192.168.123.150" + ] + }] +} diff --git a/tests/unit/modules/dcnm/fixtures/dcnm_res_manager_payloads.json b/tests/unit/modules/dcnm/fixtures/dcnm_res_manager_payloads.json new file mode 100644 index 000000000..4a009456d --- /dev/null +++ b/tests/unit/modules/dcnm/fixtures/dcnm_res_manager_payloads.json @@ -0,0 +1,1276 @@ +{ + "create_rm_l3vni_resp": + { + "RETURN_CODE": 200, + "METHOD": "POST", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/resources", + "MESSAGE": "OK", + "DATA": { + "resource": 101, + "dbId": 20547 + } + }, + "create_rm_l2vni_resp": + { + "RETURN_CODE": 200, + "METHOD": "POST", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/resources", + "MESSAGE": "OK", + "DATA": { + "resource": 102, + "dbId": 20548 + } + }, + "create_rm_id_dev_sw1_resp": + { + "RETURN_CODE": 200, + "METHOD": "POST", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/resources", + "MESSAGE": "OK", + "DATA": { + "resource": 200, + "dbId": 20549 + } + }, + "create_rm_id_dev_sw2_resp": + { + "RETURN_CODE": 200, + "METHOD": "POST", + "REQUEST_PATH": 
"https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/resources", + "MESSAGE": "OK", + "DATA": { + "resource": 200, + "dbId": 20550 + } + }, + "create_rm_vpcid_dev_pair_sw1_resp": + { + "RETURN_CODE": 200, + "METHOD": "POST", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/resources", + "MESSAGE": "OK", + "DATA": { + "resource": 500, + "dbId": 20551 + } + }, + "create_rm_vpcid_dev_pair_sw2_resp": + { + "RETURN_CODE": 200, + "METHOD": "POST", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/resources", + "MESSAGE": "OK", + "DATA": { + "resource": 500, + "dbId": 20552 + } + }, + "create_rm_ip_fabric_resp":{ + "RETURN_CODE": 200, + "METHOD": "POST", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/resources", + "MESSAGE": "OK", + "DATA": { + "resource": "110.1.1.1", + "dbId": 20553 + } + }, + "create_rm_ip_dev_int_resp": + { + "RETURN_CODE": 200, + "METHOD": "POST", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/resources", + "MESSAGE": "OK", + "DATA": { + "resource": "fe:80::4", + "dbId": 20554 + } + }, + "create_rm_subnet_link_resp": + { + "RETURN_CODE": 200, + "METHOD": "POST", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/resources", + "MESSAGE": "OK", + "DATA": { + "resource": "fe:80:505::5/64", + "dbId": 20555 + } + }, + "delete_rm_resp": + { + "RETURN_CODE": 200, + "METHOD": "DELETE", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/resources?id=20574,20575,20576,20577,20578,20579,20580,20581,20582", + "MESSAGE": "OK", + "DATA": { + "successList": [ + 20574, + 20575, + 20576, + 
20577, + 20578, + 20579, + 20580, + 20581, + 20582 + ] + } + } , + "get_rm_id_l3vni_resp":{ + + "RETURN_CODE": 200, + "METHOD": "GET", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/pools", + "MESSAGE": "OK", + "DATA":[{ + "id": 20574, + "resourcePool": { + "id": 0, + "poolName": "L3_VNI", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "L3_VNI", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Fabric", + "entityName": "l3_vni_fabric", + "allocatedIp": "101", + "allocatedOn": 1649049896210, + "allocatedFlag": true, + "allocatedScopeValue": "mmudigon", + "ipAddress": "", + "switchName": "" + }] + }, + "get_rm_id_l2vni_resp":{ + "RETURN_CODE": 200, + "METHOD": "GET", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/pools", + "MESSAGE": "OK", + "DATA":[{ + "id": 20575, + "resourcePool": { + "id": 0, + "poolName": "L2_VNI", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "L2_VNI", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Fabric", + "entityName": "l2_vni_fabric", + "allocatedIp": "102", + "allocatedOn": 1649049898599, + "allocatedFlag": true, + "allocatedScopeValue": "mmudigon", + "ipAddress": "", + "switchName": "" + }] + }, + "get_rm_id_dev_sw1_resp":{ + "RETURN_CODE": 200, + "METHOD": "GET", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/pools", + "MESSAGE": "OK", + "DATA":[{ + "id": 20576, + "resourcePool": { + "id": 0, + "poolName": "LOOPBACK_ID", + "fabricName": "None", + "vrfName": "None", + "poolType": "LOOPBACK_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "loopback_dev", + "allocatedIp": "200", + "allocatedOn": 
1649049900999, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }] + }, + "get_rm_id_dev_sw2_resp":{ + "RETURN_CODE": 200, + "METHOD": "GET", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/pools", + "MESSAGE": "OK", + "DATA":[{ + "id": 20577, + "resourcePool": { + "id": 0, + "poolName": "LOOPBACK_ID", + "fabricName": "None", + "vrfName": "None", + "poolType": "LOOPBACK_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "loopback_dev", + "allocatedIp": "200", + "allocatedOn": 1649049903213, + "allocatedFlag": true, + "allocatedScopeValue": "9NXHSNTEO6C", + "ipAddress": "192.168.123.151", + "switchName": "n9kv-200" + }] + }, + "get_rm_vpcid_dev_pair_sw1_resp":{ + "RETURN_CODE": 200, + "METHOD": "GET", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/pools", + "MESSAGE": "OK", + "DATA":[{ + "id": 20578, + "resourcePool": { + "id": 0, + "poolName": "VPC_ID", + "fabricName": "None", + "vrfName": "None", + "poolType": "VPC_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DevicePair", + "entityName": "9M99N34RDED~9NXHSNTEO6C", + "allocatedIp": "500", + "allocatedOn": 1649049905593, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }] + }, + "get_rm_vpcid_dev_pair_sw2_resp":{ + "RETURN_CODE": 200, + "METHOD": "GET", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/pools", + "MESSAGE": "OK", + "DATA":[{ + "id": 20579, + "resourcePool": { + "id": 0, + "poolName": "VPC_ID", + "fabricName": "None", + "vrfName": "None", + "poolType": "VPC_ID", + "dynamicSubnetRange": 
"None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DevicePair", + "entityName": "9M99N34RDED~9NXHSNTEO6C", + "allocatedIp": "500", + "allocatedOn": 1649049905603, + "allocatedFlag": true, + "allocatedScopeValue": "9NXHSNTEO6C", + "ipAddress": "192.168.123.151", + "switchName": "n9kv-200" + }] + }, + "get_rm_ip_fabric_resp":{ + "RETURN_CODE": 200, + "METHOD": "GET", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/pools", + "MESSAGE": "OK", + "DATA":[{ + "id": 20580, + "resourcePool": { + "id": 0, + "poolName": "LOOPBACK0_IP_POOL", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "IP_POOL", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Fabric", + "entityName": "mmudigon-2", + "allocatedIp": "110.1.1.1", + "allocatedOn": 1649049910202, + "allocatedFlag": true, + "allocatedScopeValue": "mmudigon-2", + "ipAddress": "", + "switchName": "" + }] + }, + "get_rm_ip_dev_int_resp":{ + "RETURN_CODE": 200, + "METHOD": "GET", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/pools", + "MESSAGE": "OK", + "DATA":[{ + "id": 20581, + "resourcePool": { + "id": 0, + "poolName": "LOOPBACK1_IP_POOL", + "fabricName": "None", + "vrfName": "None", + "poolType": "IP_POOL", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DeviceInterface", + "entityName": "9M99N34RDED~Ethernet1/10", + "allocatedIp": "fe:80::4", + "allocatedOn": 1649049912705, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }] + }, + "get_rm_subnet_link_resp":{ + "RETURN_CODE": 200, + "METHOD": "GET", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon/pools", + "MESSAGE": "OK", + 
"DATA":[{ + "id": 20582, + "resourcePool": { + "id": 0, + "poolName": "SUBNET", + "fabricName": "None", + "vrfName": "None", + "poolType": "SUBNET_POOL", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Link", + "entityName": "9M99N34RDED~Ethernet1/3~9NXHSNTEO6C~Ethernet1/3", + "allocatedIp": "fe:80:505::5/64", + "allocatedOn": 1649049915007, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }] + }, + + "query_rm_resp":{ + "RETURN_CODE": 200, + "METHOD": "GET", + "REQUEST_PATH": "https://10.195.225.193:443/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/fabrics/mmudigon", + "MESSAGE": "OK", + "DATA": [ + { + "id": 2480, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "LOOPBACK_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "loopback0", + "allocatedIp": "0", + "allocatedOn": 1635405579291, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 2486, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "LOOPBACK_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "loopback1", + "allocatedIp": "1", + "allocatedOn": 1635405580801, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 12893, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "TOP_DOWN_VRF_VLAN", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "MY_IT_VRF_71", + 
"allocatedIp": "2001", + "allocatedOn": 1639480642281, + "allocatedFlag": false, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 13296, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "TOP_DOWN_NETWORK_VLAN", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "vlan3000", + "allocatedIp": "3000", + "allocatedOn": 1642482549483, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 19858, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "LOOPBACK_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "loopback0", + "allocatedIp": "0", + "allocatedOn": 1646833111581, + "allocatedFlag": true, + "allocatedScopeValue": "9NXHSNTEO6C", + "ipAddress": "192.168.123.151", + "switchName": "n9kv-200" + }, + { + "id": 19860, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "LOOPBACK_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "loopback1", + "allocatedIp": "1", + "allocatedOn": 1646833112183, + "allocatedFlag": true, + "allocatedScopeValue": "9NXHSNTEO6C", + "ipAddress": "192.168.123.151", + "switchName": "n9kv-200" + }, + { + "id": 19884, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "TOP_DOWN_VRF_VLAN", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "MY_IT_VRF_11", + "allocatedIp": "2000", + "allocatedOn": 1646889683549, + "allocatedFlag": false, + 
"allocatedScopeValue": "9NXHSNTEO6C", + "ipAddress": "192.168.123.151", + "switchName": "n9kv-200" + }, + { + "id": 19886, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "TOP_DOWN_VRF_VLAN", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "MY_IT_VRF_21", + "allocatedIp": "2001", + "allocatedOn": 1646889689719, + "allocatedFlag": false, + "allocatedScopeValue": "9NXHSNTEO6C", + "ipAddress": "192.168.123.151", + "switchName": "n9kv-200" + }, + { + "id": 19961, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "TOP_DOWN_VRF_VLAN", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "MY_IT_VRF_11", + "allocatedIp": "2000", + "allocatedOn": 1647407756432, + "allocatedFlag": false, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 19981, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "VPC_PEER_LINK_VLAN", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DevicePair", + "entityName": "9M99N34RDED~9NXHSNTEO6C", + "allocatedIp": "3600", + "allocatedOn": 1647513662346, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 19982, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "VPC_PEER_LINK_VLAN", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DevicePair", + "entityName": "9M99N34RDED~9NXHSNTEO6C", + "allocatedIp": "3600", + "allocatedOn": 1647513662346, + "allocatedFlag": true, + "allocatedScopeValue": "9NXHSNTEO6C", 
+ "ipAddress": "192.168.123.151", + "switchName": "n9kv-200" + }, + { + "id": 19983, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "PORT_CHANNEL_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "port-channel500", + "allocatedIp": "500", + "allocatedOn": 1647513662364, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 19987, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "PORT_CHANNEL_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "port-channel500", + "allocatedIp": "500", + "allocatedOn": 1647513666451, + "allocatedFlag": true, + "allocatedScopeValue": "9NXHSNTEO6C", + "ipAddress": "192.168.123.151", + "switchName": "n9kv-200" + }, + { + "id": 20289, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "LOOPBACK_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "Loopback100", + "allocatedIp": "100", + "allocatedOn": 1648056311906, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 20292, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "PORT_CHANNEL_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "Port-channel300", + "allocatedIp": "300", + "allocatedOn": 1648056318112, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 
20293, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "VPC_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DevicePair", + "entityName": "9M99N34RDED~9NXHSNTEO6C~vPC750", + "allocatedIp": "750", + "allocatedOn": 1648056323413, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 20294, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "VPC_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DevicePair", + "entityName": "9M99N34RDED~9NXHSNTEO6C~vPC750", + "allocatedIp": "750", + "allocatedOn": 1648056323418, + "allocatedFlag": true, + "allocatedScopeValue": "9NXHSNTEO6C", + "ipAddress": "192.168.123.151", + "switchName": "n9kv-200" + }, + { + "id": 20295, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "PORT_CHANNEL_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "vPC750", + "allocatedIp": "310", + "allocatedOn": 1648056323743, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 20296, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "PORT_CHANNEL_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "vPC750", + "allocatedIp": "310", + "allocatedOn": 1648056324100, + "allocatedFlag": true, + "allocatedScopeValue": "9NXHSNTEO6C", + "ipAddress": "192.168.123.151", + "switchName": "n9kv-200" + }, + { + "id": 20594, + "resourcePool": { + "id": 0, + "poolName": "", + 
"fabricName": "mmudigon", + "vrfName": "None", + "poolType": "LOOPBACK_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "loopback_dev", + "allocatedIp": "200", + "allocatedOn": 1649060112224, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 20595, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "LOOPBACK_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Device", + "entityName": "loopback_dev", + "allocatedIp": "200", + "allocatedOn": 1649060114523, + "allocatedFlag": true, + "allocatedScopeValue": "9NXHSNTEO6C", + "ipAddress": "192.168.123.151", + "switchName": "n9kv-200" + }, + { + "id": 20596, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "VPC_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DevicePair", + "entityName": "9M99N34RDED~9NXHSNTEO6C", + "allocatedIp": "500", + "allocatedOn": 1649060116773, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 20597, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "VPC_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DevicePair", + "entityName": "9M99N34RDED~9NXHSNTEO6C", + "allocatedIp": "500", + "allocatedOn": 1649060116777, + "allocatedFlag": true, + "allocatedScopeValue": "9NXHSNTEO6C", + "ipAddress": "192.168.123.151", + "switchName": "n9kv-200" + }, + { + "id": 20291, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": 
"TOP_DOWN_L3_DOT1Q", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DeviceInterface", + "entityName": "Ethernet1/1.10", + "allocatedIp": "10", + "allocatedOn": 1648056315208, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 19980, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "VPC_DOMAIN_ID", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Fabric", + "entityName": "9M99N34RDED~9NXHSNTEO6C", + "allocatedIp": "1", + "allocatedOn": 1647513662288, + "allocatedFlag": true, + "allocatedScopeValue": "mmudigon", + "ipAddress": "", + "switchName": "" + }, + { + "id": 20013, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "L3_VNI", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Fabric", + "entityName": "MyVRF_50000", + "allocatedIp": "50000", + "allocatedOn": 1647927265256, + "allocatedFlag": true, + "allocatedScopeValue": "mmudigon", + "ipAddress": "", + "switchName": "" + }, + { + "id": 20014, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "L2_VNI", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Fabric", + "entityName": "MyNetwork_30000", + "allocatedIp": "30000", + "allocatedOn": 1647927279161, + "allocatedFlag": true, + "allocatedScopeValue": "mmudigon", + "ipAddress": "", + "switchName": "" + }, + { + "id": 20592, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "L3_VNI", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Fabric", + "entityName": "l3_vni_fabric", + 
"allocatedIp": "101", + "allocatedOn": 1649060107501, + "allocatedFlag": true, + "allocatedScopeValue": "mmudigon", + "ipAddress": "", + "switchName": "" + }, + { + "id": 20593, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "L2_VNI", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Fabric", + "entityName": "l2_vni_fabric", + "allocatedIp": "102", + "allocatedOn": 1649060109931, + "allocatedFlag": true, + "allocatedScopeValue": "mmudigon", + "ipAddress": "", + "switchName": "" + }, + { + "id": 2478, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "IP_POOL", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Fabric", + "entityName": "ANYCAST_RP", + "allocatedIp": "10.254.254.1", + "allocatedOn": 1635405578997, + "allocatedFlag": true, + "allocatedScopeValue": "ANYCAST_RP", + "ipAddress": "", + "switchName": "" + }, + { + "id": 2479, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "IP_POOL", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DeviceInterface", + "entityName": "9M99N34RDED~loopback0", + "allocatedIp": "10.2.0.1", + "allocatedOn": 1635405579238, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 2485, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "IP_POOL", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DeviceInterface", + "entityName": "9M99N34RDED~loopback1", + "allocatedIp": "10.3.0.2", + "allocatedOn": 1635405580751, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": 
"192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 19857, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "IP_POOL", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DeviceInterface", + "entityName": "9NXHSNTEO6C~loopback0", + "allocatedIp": "10.2.0.2", + "allocatedOn": 1646833111520, + "allocatedFlag": true, + "allocatedScopeValue": "9NXHSNTEO6C", + "ipAddress": "192.168.123.151", + "switchName": "n9kv-200" + }, + { + "id": 19859, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "IP_POOL", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DeviceInterface", + "entityName": "9NXHSNTEO6C~loopback1", + "allocatedIp": "10.3.0.1", + "allocatedOn": 1646833112133, + "allocatedFlag": true, + "allocatedScopeValue": "9NXHSNTEO6C", + "ipAddress": "192.168.123.151", + "switchName": "n9kv-200" + }, + { + "id": 19984, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "SUBNET_POOL", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Link", + "entityName": "9M99N34RDED~Vlan3600~9NXHSNTEO6C~Vlan3600", + "allocatedIp": "10.4.0.0/30", + "allocatedOn": 1647513664206, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 19985, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "IP_POOL", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DeviceInterface", + "entityName": "9M99N34RDED~Vlan3600", + "allocatedIp": "10.4.0.1", + "allocatedOn": 1647513664224, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": 
"192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 19986, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "IP_POOL", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DeviceInterface", + "entityName": "9M99N34RDED~9NXHSNTEO6C~loopback1", + "allocatedIp": "10.3.0.3", + "allocatedOn": 1647513664287, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 19988, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "IP_POOL", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DeviceInterface", + "entityName": "9NXHSNTEO6C~Vlan3600", + "allocatedIp": "10.4.0.2", + "allocatedOn": 1647513668192, + "allocatedFlag": true, + "allocatedScopeValue": "9NXHSNTEO6C", + "ipAddress": "192.168.123.151", + "switchName": "n9kv-200" + }, + { + "id": 20015, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "IP_POOL", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Fabric", + "entityName": "MyNetwork_30000", + "allocatedIp": "239.1.1.1", + "allocatedOn": 1647927279169, + "allocatedFlag": true, + "allocatedScopeValue": "MyNetwork_30000", + "ipAddress": "", + "switchName": "" + }, + { + "id": 20290, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "IP_POOL", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DeviceInterface", + "entityName": "9M99N34RDED~loopback100", + "allocatedIp": "192.168.1.1", + "allocatedOn": 1648056312299, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": 
"n9kv-100" + }, + { + "id": 20598, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "IP_POOL", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Fabric", + "entityName": "mmudigon-2", + "allocatedIp": "110.1.1.1", + "allocatedOn": 1649060121393, + "allocatedFlag": true, + "allocatedScopeValue": "mmudigon-2", + "ipAddress": "", + "switchName": "" + }, + { + "id": 20599, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "IP_POOL", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "DeviceInterface", + "entityName": "9M99N34RDED~Ethernet1/10", + "allocatedIp": "fe:80::4", + "allocatedOn": 1649060123699, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + }, + { + "id": 20600, + "resourcePool": { + "id": 0, + "poolName": "", + "fabricName": "mmudigon", + "vrfName": "None", + "poolType": "SUBNET_POOL", + "dynamicSubnetRange": "None", + "targetSubnet": 0, + "overlapAllowed": false + }, + "entityType": "Link", + "entityName": "9M99N34RDED~Ethernet1/3~9NXHSNTEO6C~Ethernet1/3", + "allocatedIp": "fe:80:505::5/64", + "allocatedOn": 1649060126019, + "allocatedFlag": true, + "allocatedScopeValue": "9M99N34RDED", + "ipAddress": "192.168.123.150", + "switchName": "n9kv-100" + } + ] + } +} diff --git a/tests/unit/modules/dcnm/test_dcnm_intf.py b/tests/unit/modules/dcnm/test_dcnm_intf.py index 40df8299a..f2f697288 100644 --- a/tests/unit/modules/dcnm/test_dcnm_intf.py +++ b/tests/unit/modules/dcnm/test_dcnm_intf.py @@ -17,7 +17,7 @@ __metaclass__ = type -from ansible_collections.ansible.netcommon.tests.unit.compat.mock import patch +from unittest.mock import patch # from units.compat.mock import patch diff --git a/tests/unit/modules/dcnm/test_dcnm_inventory.py 
b/tests/unit/modules/dcnm/test_dcnm_inventory.py index 3052f0267..60c9ea8d6 100644 --- a/tests/unit/modules/dcnm/test_dcnm_inventory.py +++ b/tests/unit/modules/dcnm/test_dcnm_inventory.py @@ -17,7 +17,7 @@ __metaclass__ = type -from ansible_collections.ansible.netcommon.tests.unit.compat.mock import patch +from unittest.mock import patch from ansible_collections.cisco.dcnm.plugins.modules import dcnm_inventory from .dcnm_module import TestDcnmModule, set_module_args, loadPlaybookData diff --git a/tests/unit/modules/dcnm/test_dcnm_policy.py b/tests/unit/modules/dcnm/test_dcnm_policy.py index 811e36780..1ba14f468 100644 --- a/tests/unit/modules/dcnm/test_dcnm_policy.py +++ b/tests/unit/modules/dcnm/test_dcnm_policy.py @@ -17,7 +17,7 @@ __metaclass__ = type -from ansible_collections.ansible.netcommon.tests.unit.compat.mock import patch +from unittest.mock import patch from ansible_collections.cisco.dcnm.plugins.modules import dcnm_policy from .dcnm_module import TestDcnmModule, set_module_args, loadPlaybookData diff --git a/tests/unit/modules/dcnm/test_dcnm_res_manager.py b/tests/unit/modules/dcnm/test_dcnm_res_manager.py new file mode 100644 index 000000000..48ed7ece5 --- /dev/null +++ b/tests/unit/modules/dcnm/test_dcnm_res_manager.py @@ -0,0 +1,1113 @@ +# Copyright (c) 2020-2022 Cisco and/or its affiliates. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Make coding more python3-ish +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from unittest.mock import patch + +from ansible_collections.cisco.dcnm.plugins.modules import dcnm_resource_manager +from .dcnm_module import TestDcnmModule, set_module_args, loadPlaybookData + +import json +import copy + + +class TestDcnmResManagerModule(TestDcnmModule): + + module = dcnm_resource_manager + fd = None + + def init_data(self): + self.fd = None + + def log_msg(self, msg): + + if self.fd is None: + self.fd = open("rm-ut.log", "a+") + self.fd.write(msg) + + def setUp(self): + + super(TestDcnmResManagerModule, self).setUp() + + self.mock_dcnm_ip_sn = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_resource_manager.get_ip_sn_dict" + ) + self.run_dcnm_ip_sn = self.mock_dcnm_ip_sn.start() + + self.mock_dcnm_fabric_details = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_resource_manager.get_fabric_inventory_details" + ) + self.run_dcnm_fabric_details = self.mock_dcnm_fabric_details.start() + + self.mock_dcnm_send = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_resource_manager.dcnm_send" + ) + self.run_dcnm_send = self.mock_dcnm_send.start() + + self.mock_dcnm_version_supported = patch( + "ansible_collections.cisco.dcnm.plugins.modules.dcnm_resource_manager.dcnm_version_supported" + ) + self.run_dcnm_version_supported = ( + self.mock_dcnm_version_supported.start() + ) + + def tearDown(self): + + super(TestDcnmResManagerModule, self).tearDown() + self.mock_dcnm_send.stop() + self.mock_dcnm_version_supported.stop() + self.mock_dcnm_fabric_details.stop() + self.mock_dcnm_ip_sn.stop() + + # -------------------------- FIXTURES -------------------------- + + def load_rm_fixtures(self): + + if "test_dcnm_rm_merged_new" == self._testMethodName: + + get_rm_id_l3vni_resp = [] + get_rm_id_l2vni_resp = [] + get_rm_id_dev_sw1_resp = [] + get_rm_id_dev_sw2_resp = [] + get_rm_vpcid_dev_pair_sw1_resp = 
[] + get_rm_vpcid_dev_pair_sw2_resp = [] + get_rm_ip_fabric_resp = [] + get_rm_ip_dev_int_resp = [] + get_rm_subnet_link_resp = [] + create_rm_l3vni_resp = self.payloads_data.get( + "create_rm_l3vni_resp" + ) + create_rm_l2vni_resp = self.payloads_data.get( + "create_rm_l2vni_resp" + ) + create_rm_id_dev_sw1_resp = self.payloads_data.get( + "create_rm_id_dev_sw1_resp" + ) + create_rm_id_dev_sw2_resp = self.payloads_data.get( + "create_rm_id_dev_sw2_resp" + ) + create_rm_vpcid_dev_pair_sw1_resp = self.payloads_data.get( + "create_rm_vpcid_dev_pair_sw1_resp" + ) + create_rm_vpcid_dev_pair_sw2_resp = self.payloads_data.get( + "create_rm_vpcid_dev_pair_sw2_resp" + ) + create_rm_ip_fabric_resp = self.payloads_data.get( + "create_rm_ip_fabric_resp" + ) + create_rm_ip_dev_int_resp = self.payloads_data.get( + "create_rm_ip_dev_int_resp" + ) + create_rm_subnet_link_resp = self.payloads_data.get( + "create_rm_subnet_link_resp" + ) + + self.run_dcnm_send.side_effect = [ + get_rm_id_l3vni_resp, + get_rm_id_l2vni_resp, + get_rm_id_dev_sw1_resp, + get_rm_id_dev_sw2_resp, + get_rm_vpcid_dev_pair_sw1_resp, + get_rm_vpcid_dev_pair_sw2_resp, + get_rm_ip_fabric_resp, + get_rm_ip_dev_int_resp, + get_rm_subnet_link_resp, + create_rm_l3vni_resp, + create_rm_l2vni_resp, + create_rm_id_dev_sw1_resp, + create_rm_id_dev_sw2_resp, + create_rm_vpcid_dev_pair_sw1_resp, + create_rm_vpcid_dev_pair_sw2_resp, + create_rm_ip_fabric_resp, + create_rm_ip_dev_int_resp, + create_rm_subnet_link_resp, + ] + + if "test_dcnm_rm_merged_existing" == self._testMethodName: + + get_rm_id_l3vni_resp = self.payloads_data.get( + "get_rm_id_l3vni_resp" + ) + get_rm_id_l2vni_resp = self.payloads_data.get( + "get_rm_id_l2vni_resp" + ) + get_rm_id_dev_sw1_resp = self.payloads_data.get( + "get_rm_id_dev_sw1_resp" + ) + get_rm_id_dev_sw2_resp = self.payloads_data.get( + "get_rm_id_dev_sw2_resp" + ) + get_rm_vpcid_dev_pair_sw1_resp = self.payloads_data.get( + "get_rm_vpcid_dev_pair_sw1_resp" + ) + 
get_rm_vpcid_dev_pair_sw2_resp = self.payloads_data.get( + "get_rm_vpcid_dev_pair_sw2_resp" + ) + get_rm_ip_fabric_resp = self.payloads_data.get( + "get_rm_ip_fabric_resp" + ) + get_rm_ip_dev_int_resp = self.payloads_data.get( + "get_rm_ip_dev_int_resp" + ) + get_rm_subnet_link_resp = self.payloads_data.get( + "get_rm_subnet_link_resp" + ) + create_rm_l3vni_resp = self.payloads_data.get( + "create_rm_l3vni_resp" + ) + create_rm_l2vni_resp = self.payloads_data.get( + "create_rm_l2vni_resp" + ) + create_rm_id_dev_sw1_resp = self.payloads_data.get( + "create_rm_id_dev_sw1_resp" + ) + create_rm_id_dev_sw2_resp = self.payloads_data.get( + "create_rm_id_dev_sw2_resp" + ) + create_rm_vpcid_dev_pair_sw1_resp = self.payloads_data.get( + "create_rm_vpcid_dev_pair_sw1_resp" + ) + create_rm_vpcid_dev_pair_sw2_resp = self.payloads_data.get( + "create_rm_vpcid_dev_pair_sw2_resp" + ) + create_rm_ip_fabric_resp = self.payloads_data.get( + "create_rm_ip_fabric_resp" + ) + create_rm_ip_dev_int_resp = self.payloads_data.get( + "create_rm_ip_dev_int_resp" + ) + create_rm_subnet_link_resp = self.payloads_data.get( + "create_rm_subnet_link_resp" + ) + + self.run_dcnm_send.side_effect = [ + get_rm_id_l3vni_resp, + get_rm_id_l2vni_resp, + get_rm_id_dev_sw1_resp, + get_rm_id_dev_sw2_resp, + get_rm_vpcid_dev_pair_sw1_resp, + get_rm_vpcid_dev_pair_sw2_resp, + get_rm_ip_fabric_resp, + get_rm_ip_dev_int_resp, + get_rm_subnet_link_resp, + create_rm_l3vni_resp, + create_rm_l2vni_resp, + create_rm_id_dev_sw1_resp, + create_rm_id_dev_sw2_resp, + create_rm_vpcid_dev_pair_sw1_resp, + create_rm_vpcid_dev_pair_sw2_resp, + create_rm_ip_fabric_resp, + create_rm_ip_dev_int_resp, + create_rm_subnet_link_resp, + ] + + if "test_dcnm_rm_merged_new_no_state" == self._testMethodName: + + get_rm_id_l3vni_resp = [] + get_rm_id_l2vni_resp = [] + get_rm_id_dev_sw1_resp = [] + get_rm_id_dev_sw2_resp = [] + get_rm_vpcid_dev_pair_sw1_resp = [] + get_rm_vpcid_dev_pair_sw2_resp = [] + get_rm_ip_fabric_resp = [] + 
get_rm_ip_dev_int_resp = [] + get_rm_subnet_link_resp = [] + create_rm_l3vni_resp = self.payloads_data.get( + "create_rm_l3vni_resp" + ) + create_rm_l2vni_resp = self.payloads_data.get( + "create_rm_l2vni_resp" + ) + create_rm_id_dev_sw1_resp = self.payloads_data.get( + "create_rm_id_dev_sw1_resp" + ) + create_rm_id_dev_sw2_resp = self.payloads_data.get( + "create_rm_id_dev_sw2_resp" + ) + create_rm_vpcid_dev_pair_sw1_resp = self.payloads_data.get( + "create_rm_vpcid_dev_pair_sw1_resp" + ) + create_rm_vpcid_dev_pair_sw2_resp = self.payloads_data.get( + "create_rm_vpcid_dev_pair_sw2_resp" + ) + create_rm_ip_fabric_resp = self.payloads_data.get( + "create_rm_ip_fabric_resp" + ) + create_rm_ip_dev_int_resp = self.payloads_data.get( + "create_rm_ip_dev_int_resp" + ) + create_rm_subnet_link_resp = self.payloads_data.get( + "create_rm_subnet_link_resp" + ) + + self.run_dcnm_send.side_effect = [ + get_rm_id_l3vni_resp, + get_rm_id_l2vni_resp, + get_rm_id_dev_sw1_resp, + get_rm_id_dev_sw2_resp, + get_rm_vpcid_dev_pair_sw1_resp, + get_rm_vpcid_dev_pair_sw2_resp, + get_rm_ip_fabric_resp, + get_rm_ip_dev_int_resp, + get_rm_subnet_link_resp, + create_rm_l3vni_resp, + create_rm_l2vni_resp, + create_rm_id_dev_sw1_resp, + create_rm_id_dev_sw2_resp, + create_rm_vpcid_dev_pair_sw1_resp, + create_rm_vpcid_dev_pair_sw2_resp, + create_rm_ip_fabric_resp, + create_rm_ip_dev_int_resp, + create_rm_subnet_link_resp, + ] + + if "test_dcnm_rm_merged_new_check_mode" == self._testMethodName: + pass + + if ( + "test_dcnm_rm_merged_new_existing_and_non_existing" + == self._testMethodName + ): + + get_rm_id_l3vni_resp = [] + get_rm_id_l2vni_resp = [] + get_rm_id_dev_sw1_resp = self.payloads_data.get( + "get_rm_id_dev_sw1_resp" + ) + get_rm_id_dev_sw2_resp = self.payloads_data.get( + "get_rm_id_dev_sw2_resp" + ) + get_rm_vpcid_dev_pair_sw1_resp = self.payloads_data.get( + "get_rm_vpcid_dev_pair_sw1_resp" + ) + get_rm_vpcid_dev_pair_sw2_resp = self.payloads_data.get( + 
"get_rm_vpcid_dev_pair_sw2_resp" + ) + get_rm_ip_fabric_resp = self.payloads_data.get( + "get_rm_ip_fabric_resp" + ) + get_rm_ip_dev_int_resp = [] + get_rm_subnet_link_resp = [] + create_rm_l3vni_resp = self.payloads_data.get( + "create_rm_l3vni_resp" + ) + create_rm_l2vni_resp = self.payloads_data.get( + "create_rm_l2vni_resp" + ) + create_rm_id_dev_sw1_resp = self.payloads_data.get( + "create_rm_id_dev_sw1_resp" + ) + create_rm_id_dev_sw2_resp = self.payloads_data.get( + "create_rm_id_dev_sw2_resp" + ) + create_rm_vpcid_dev_pair_sw1_resp = self.payloads_data.get( + "create_rm_vpcid_dev_pair_sw1_resp" + ) + create_rm_vpcid_dev_pair_sw2_resp = self.payloads_data.get( + "create_rm_vpcid_dev_pair_sw2_resp" + ) + create_rm_ip_fabric_resp = self.payloads_data.get( + "create_rm_ip_fabric_resp" + ) + create_rm_ip_dev_int_resp = self.payloads_data.get( + "create_rm_ip_dev_int_resp" + ) + create_rm_subnet_link_resp = self.payloads_data.get( + "create_rm_subnet_link_resp" + ) + + self.run_dcnm_send.side_effect = [ + get_rm_id_l3vni_resp, + get_rm_id_l2vni_resp, + get_rm_id_dev_sw1_resp, + get_rm_id_dev_sw2_resp, + get_rm_vpcid_dev_pair_sw1_resp, + get_rm_vpcid_dev_pair_sw2_resp, + get_rm_ip_fabric_resp, + get_rm_ip_dev_int_resp, + get_rm_subnet_link_resp, + create_rm_l3vni_resp, + create_rm_l2vni_resp, + create_rm_id_dev_sw1_resp, + create_rm_id_dev_sw2_resp, + create_rm_vpcid_dev_pair_sw1_resp, + create_rm_vpcid_dev_pair_sw2_resp, + create_rm_ip_fabric_resp, + create_rm_ip_dev_int_resp, + create_rm_subnet_link_resp, + ] + + if "test_dcnm_rm_modify_existing" == self._testMethodName: + + get_rm_id_l3vni_resp = self.payloads_data.get( + "get_rm_id_l3vni_resp" + ) + get_rm_id_l2vni_resp = self.payloads_data.get( + "get_rm_id_l2vni_resp" + ) + get_rm_id_dev_sw1_resp = self.payloads_data.get( + "get_rm_id_dev_sw1_resp" + ) + get_rm_id_dev_sw2_resp = self.payloads_data.get( + "get_rm_id_dev_sw2_resp" + ) + get_rm_vpcid_dev_pair_sw1_resp = self.payloads_data.get( + 
"get_rm_vpcid_dev_pair_sw1_resp" + ) + get_rm_vpcid_dev_pair_sw2_resp = self.payloads_data.get( + "get_rm_vpcid_dev_pair_sw2_resp" + ) + get_rm_ip_fabric_resp = self.payloads_data.get( + "get_rm_ip_fabric_resp" + ) + get_rm_ip_dev_int_resp = self.payloads_data.get( + "get_rm_ip_dev_int_resp" + ) + get_rm_subnet_link_resp = self.payloads_data.get( + "get_rm_subnet_link_resp" + ) + create_rm_l3vni_resp = self.payloads_data.get( + "create_rm_l3vni_resp" + ) + create_rm_l2vni_resp = self.payloads_data.get( + "create_rm_l2vni_resp" + ) + create_rm_id_dev_sw1_resp = self.payloads_data.get( + "create_rm_id_dev_sw1_resp" + ) + create_rm_id_dev_sw2_resp = self.payloads_data.get( + "create_rm_id_dev_sw2_resp" + ) + create_rm_vpcid_dev_pair_sw1_resp = self.payloads_data.get( + "create_rm_vpcid_dev_pair_sw1_resp" + ) + create_rm_vpcid_dev_pair_sw2_resp = self.payloads_data.get( + "create_rm_vpcid_dev_pair_sw2_resp" + ) + create_rm_ip_fabric_resp = self.payloads_data.get( + "create_rm_ip_fabric_resp" + ) + create_rm_ip_dev_int_resp = self.payloads_data.get( + "create_rm_ip_dev_int_resp" + ) + create_rm_subnet_link_resp = self.payloads_data.get( + "create_rm_subnet_link_resp" + ) + + self.run_dcnm_send.side_effect = [ + get_rm_id_l3vni_resp, + get_rm_id_l2vni_resp, + get_rm_id_dev_sw1_resp, + get_rm_id_dev_sw2_resp, + get_rm_vpcid_dev_pair_sw1_resp, + get_rm_vpcid_dev_pair_sw2_resp, + get_rm_ip_fabric_resp, + get_rm_ip_dev_int_resp, + get_rm_subnet_link_resp, + create_rm_l3vni_resp, + create_rm_l2vni_resp, + create_rm_id_dev_sw1_resp, + create_rm_id_dev_sw2_resp, + create_rm_vpcid_dev_pair_sw1_resp, + create_rm_vpcid_dev_pair_sw2_resp, + create_rm_ip_fabric_resp, + create_rm_ip_dev_int_resp, + create_rm_subnet_link_resp, + ] + + if "test_dcnm_rm_delete_existing" == self._testMethodName: + + get_rm_id_l3vni_resp = self.payloads_data.get( + "get_rm_id_l3vni_resp" + ) + get_rm_id_l2vni_resp = self.payloads_data.get( + "get_rm_id_l2vni_resp" + ) + get_rm_id_dev_sw1_resp = 
self.payloads_data.get( + "get_rm_id_dev_sw1_resp" + ) + get_rm_id_dev_sw2_resp = self.payloads_data.get( + "get_rm_id_dev_sw2_resp" + ) + get_rm_vpcid_dev_pair_sw1_resp = self.payloads_data.get( + "get_rm_vpcid_dev_pair_sw1_resp" + ) + get_rm_vpcid_dev_pair_sw2_resp = self.payloads_data.get( + "get_rm_vpcid_dev_pair_sw2_resp" + ) + get_rm_ip_fabric_resp = self.payloads_data.get( + "get_rm_ip_fabric_resp" + ) + get_rm_ip_dev_int_resp = self.payloads_data.get( + "get_rm_ip_dev_int_resp" + ) + get_rm_subnet_link_resp = self.payloads_data.get( + "get_rm_subnet_link_resp" + ) + delete_rm_resp = self.payloads_data.get("delete_rm_resp") + + self.run_dcnm_send.side_effect = [ + get_rm_id_l3vni_resp, + get_rm_id_l2vni_resp, + get_rm_id_dev_sw1_resp, + get_rm_id_dev_sw2_resp, + get_rm_vpcid_dev_pair_sw1_resp, + get_rm_vpcid_dev_pair_sw2_resp, + get_rm_ip_fabric_resp, + get_rm_ip_dev_int_resp, + get_rm_subnet_link_resp, + delete_rm_resp, + ] + + if ( + "test_dcnm_rm_delete_existing_and_non_existing" + == self._testMethodName + ): + + get_rm_id_l3vni_resp = self.payloads_data.get( + "get_rm_id_l3vni_resp" + ) + get_rm_id_l2vni_resp = self.payloads_data.get( + "get_rm_id_l2vni_resp" + ) + get_rm_id_dev_sw1_resp = [] + get_rm_id_dev_sw2_resp = [] + get_rm_vpcid_dev_pair_sw1_resp = [] + get_rm_vpcid_dev_pair_sw2_resp = self.payloads_data.get( + "get_rm_vpcid_dev_pair_sw2_resp" + ) + get_rm_ip_fabric_resp = self.payloads_data.get( + "get_rm_ip_fabric_resp" + ) + get_rm_ip_dev_int_resp = [] + get_rm_subnet_link_resp = self.payloads_data.get( + "get_rm_subnet_link_resp" + ) + delete_rm_resp = self.payloads_data.get("delete_rm_resp") + + self.run_dcnm_send.side_effect = [ + get_rm_id_l3vni_resp, + get_rm_id_l2vni_resp, + get_rm_id_dev_sw1_resp, + get_rm_id_dev_sw2_resp, + get_rm_vpcid_dev_pair_sw1_resp, + get_rm_vpcid_dev_pair_sw2_resp, + get_rm_ip_fabric_resp, + get_rm_ip_dev_int_resp, + get_rm_subnet_link_resp, + delete_rm_resp, + ] + + if "test_dcnm_rm_delete_non_existing" == 
self._testMethodName: + + get_rm_id_l3vni_resp = [] + get_rm_id_l2vni_resp = [] + get_rm_id_dev_sw1_resp = [] + get_rm_id_dev_sw2_resp = [] + get_rm_vpcid_dev_pair_sw1_resp = [] + get_rm_vpcid_dev_pair_sw2_resp = [] + get_rm_ip_fabric_resp = [] + get_rm_ip_dev_int_resp = [] + get_rm_subnet_link_resp = [] + delete_rm_resp = self.payloads_data.get("delete_rm_resp") + + self.run_dcnm_send.side_effect = [ + get_rm_id_l3vni_resp, + get_rm_id_l2vni_resp, + get_rm_id_dev_sw1_resp, + get_rm_id_dev_sw2_resp, + get_rm_vpcid_dev_pair_sw1_resp, + get_rm_vpcid_dev_pair_sw2_resp, + get_rm_ip_fabric_resp, + get_rm_ip_dev_int_resp, + get_rm_subnet_link_resp, + delete_rm_resp, + ] + + if "test_dcnm_rm_query_no_config" == self._testMethodName: + + query_rm_resp = self.payloads_data.get("query_rm_resp") + self.run_dcnm_send.side_effect = [query_rm_resp] + + if "test_dcnm_rm_query_with_" in self._testMethodName: + + query_rm_resp = self.payloads_data.get("query_rm_resp") + self.run_dcnm_send.side_effect = [ + query_rm_resp, + query_rm_resp, + query_rm_resp, + query_rm_resp, + query_rm_resp, + query_rm_resp, + query_rm_resp, + query_rm_resp, + query_rm_resp, + query_rm_resp, + ] + + def load_fixtures(self, response=None, device=""): + + self.run_dcnm_version_supported.side_effect = [11] + self.run_dcnm_fabric_details.side_effect = [self.mock_fab_inv] + self.run_dcnm_ip_sn.side_effect = [[self.mock_ip_sn, []]] + # Load resoure manager related side-effects + self.load_rm_fixtures() + + # -------------------------- FIXTURES END -------------------------- + # -------------------------- TEST-CASES ---------------------------- + + def test_dcnm_rm_merged_new(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get("create_rm_config") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + 
self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict( + state="merged", fabric="mmudigon", config=self.playbook_config + ) + ) + result = self.execute_module(changed=True, failed=False) + + self.assertEqual(len(result["diff"][0]["merged"]), 9) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + + # Validate create responses + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + + def test_dcnm_rm_merged_existing(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get("create_rm_config") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict( + state="merged", fabric="mmudigon", config=self.playbook_config + ) + ) + result = self.execute_module(changed=False, failed=False) + + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + + # Validate create responses + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + + def test_dcnm_rm_merged_new_no_state(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get("create_rm_config") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args(dict(fabric="mmudigon", config=self.playbook_config)) + result = self.execute_module(changed=True, failed=False) + + 
self.assertEqual(len(result["diff"][0]["merged"]), 9) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + + # Validate create responses + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + + def test_dcnm_rm_merged_new_check_mode(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get("create_rm_config") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict( + _ansible_check_mode=True, + state="merged", + fabric="mmudigon", + config=self.playbook_config, + ) + ) + result = self.execute_module(changed=False, failed=False) + + self.assertEqual(len(result["diff"][0]["merged"]), 9) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + + # Validate create responses + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + + def test_dcnm_rm_merged_new_existing_and_non_existing(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get("create_rm_config") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict( + state="merged", fabric="mmudigon", config=self.playbook_config + ) + ) + result = self.execute_module(changed=True, failed=False) + + self.assertEqual(len(result["diff"][0]["merged"]), 4) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + + # Validate create 
responses + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + + def test_dcnm_rm_modify_existing(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get("modify_rm_config") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict( + state="merged", fabric="mmudigon", config=self.playbook_config + ) + ) + result = self.execute_module(changed=True, failed=False) + + self.assertEqual(len(result["diff"][0]["merged"]), 9) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + + # Validate create responses + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + + def test_dcnm_rm_delete_existing(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get("delete_rm_config") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict( + state="deleted", fabric="mmudigon", config=self.playbook_config + ) + ) + result = self.execute_module(changed=True, failed=False) + + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 9) + self.assertEqual(len(result["diff"][0]["query"]), 0) + + # Validate delete responses + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + + def test_dcnm_rm_delete_existing_and_non_existing(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + 
self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get("delete_rm_config") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict( + state="deleted", fabric="mmudigon", config=self.playbook_config + ) + ) + result = self.execute_module(changed=True, failed=False) + + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 5) + self.assertEqual(len(result["diff"][0]["query"]), 0) + + # Validate delete responses + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + + def test_dcnm_rm_delete_non_existing(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get("delete_rm_config") + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict( + state="deleted", fabric="mmudigon", config=self.playbook_config + ) + ) + result = self.execute_module(changed=False, failed=False) + + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual(len(result["diff"][0]["query"]), 0) + + # Validate delete responses + for resp in result["response"]: + self.assertEqual(resp["RETURN_CODE"], 200) + + def test_dcnm_rm_query_no_config(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = [] + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") 
+ + set_module_args( + dict(state="query", fabric="mmudigon", config=self.playbook_config) + ) + result = self.execute_module(changed=False, failed=False) + + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual((len(result["response"]) > 0), True) + + def test_dcnm_rm_query_with_entity_name(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get( + "query_rm_with_entity_name_config" + ) + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict(state="query", fabric="mmudigon", config=self.playbook_config) + ) + result = self.execute_module(changed=False, failed=False) + + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual((len(result["response"]) > 0), True) + + def test_dcnm_rm_query_with_entity_name_not_exist(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get( + "query_rm_with_non_exist_entity_name_config" + ) + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict(state="query", fabric="mmudigon", config=self.playbook_config) + ) + result = self.execute_module(changed=False, failed=False) + + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual((len(result["response"]) == 0), True) + + def test_dcnm_rm_query_with_switch(self): + + # load the json from playbooks 
+ self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get( + "query_rm_with_switch_config" + ) + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict(state="query", fabric="mmudigon", config=self.playbook_config) + ) + result = self.execute_module(changed=False, failed=False) + + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual((len(result["response"]) > 0), True) + + def test_dcnm_rm_query_with_pool_name(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get( + "query_rm_with_poolname_config" + ) + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict(state="query", fabric="mmudigon", config=self.playbook_config) + ) + result = self.execute_module(changed=False, failed=False) + + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual((len(result["response"]) > 0), True) + + def test_dcnm_rm_query_with_pool_name_and_switch(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get( + "query_rm_with_poolname_and_switch_config" + ) + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict(state="query", 
fabric="mmudigon", config=self.playbook_config) + ) + result = self.execute_module(changed=False, failed=False) + + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual((len(result["response"]) > 0), True) + + def test_dcnm_rm_query_with_mixed_config(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get( + "query_rm_with_mixed_config" + ) + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict(state="query", fabric="mmudigon", config=self.playbook_config) + ) + result = self.execute_module(changed=False, failed=False) + + self.assertEqual(len(result["diff"][0]["merged"]), 0) + self.assertEqual(len(result["diff"][0]["deleted"]), 0) + self.assertEqual((len(result["response"]) > 0), True) + + def test_dcnm_rm_merge_l2dev_inv_config(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get( + "create_rm_inv_ldev_config" + ) + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict( + state="merged", fabric="mmudigon", config=self.playbook_config + ) + ) + result = None + + try: + result = self.execute_module(changed=False, failed=False) + except Exception as e: + self.assertEqual(result, None) + self.assertEqual(("is not valid" in str(e)), True) + + def test_dcnm_rm_merge_l2vni_inv_config(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = 
loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get( + "create_rm_inv_l2vni_config" + ) + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict( + state="merged", fabric="mmudigon", config=self.playbook_config + ) + ) + result = None + + try: + result = self.execute_module(changed=False, failed=False) + except Exception as e: + self.assertEqual(result, None) + self.assertEqual(("is not valid" in str(e)), True) + + def test_dcnm_rm_merge_vpcid_inv_config(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get( + "create_rm_inv_vpcid_config" + ) + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict( + state="merged", fabric="mmudigon", config=self.playbook_config + ) + ) + result = None + + try: + result = self.execute_module(changed=False, failed=False) + except Exception as e: + self.assertEqual(result, None) + self.assertEqual(("is not valid" in str(e)), True) + + def test_dcnm_rm_merge_lip0_inv_config(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get( + "create_rm_inv_lip0_config" + ) + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict( + state="merged", fabric="mmudigon", config=self.playbook_config + ) + ) + result = None + + try: + result = self.execute_module(changed=False, failed=False) + except Exception as e: + 
self.assertEqual(result, None) + self.assertEqual(("is not valid" in str(e)), True) + + def test_dcnm_rm_merge_lip1_inv_config(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get( + "create_rm_inv_lip1_config" + ) + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict( + state="merged", fabric="mmudigon", config=self.playbook_config + ) + ) + result = None + + try: + result = self.execute_module(changed=False, failed=False) + except Exception as e: + self.assertEqual(result, None) + self.assertEqual(("is not valid" in str(e)), True) + + def test_dcnm_rm_merge_subnet_inv_config(self): + + # load the json from playbooks + self.config_data = loadPlaybookData("dcnm_res_manager_configs") + self.payloads_data = loadPlaybookData("dcnm_res_manager_payloads") + + # load required config data + self.playbook_config = self.config_data.get( + "create_rm_inv_subnet_config" + ) + self.mock_ip_sn = self.config_data.get("mock_ip_sn") + self.mock_fab_inv = self.config_data.get("mock_fab_inv_data") + + set_module_args( + dict( + state="merged", fabric="mmudigon", config=self.playbook_config + ) + ) + result = None + + try: + result = self.execute_module(changed=False, failed=False) + except Exception as e: + self.assertEqual(result, None) + self.assertEqual(("is not valid" in str(e)), True) diff --git a/tests/unit/modules/dcnm/test_dcnm_service_node.py b/tests/unit/modules/dcnm/test_dcnm_service_node.py index 8a0351c6c..52862460e 100644 --- a/tests/unit/modules/dcnm/test_dcnm_service_node.py +++ b/tests/unit/modules/dcnm/test_dcnm_service_node.py @@ -17,7 +17,7 @@ __metaclass__ = type -from ansible_collections.ansible.netcommon.tests.unit.compat.mock import patch +from unittest.mock import 
patch from ansible_collections.cisco.dcnm.plugins.modules import dcnm_service_node from .dcnm_module import TestDcnmModule, set_module_args, loadPlaybookData diff --git a/tests/unit/modules/dcnm/test_dcnm_service_policy.py b/tests/unit/modules/dcnm/test_dcnm_service_policy.py index d1c0ae061..aedd8d5f5 100644 --- a/tests/unit/modules/dcnm/test_dcnm_service_policy.py +++ b/tests/unit/modules/dcnm/test_dcnm_service_policy.py @@ -17,7 +17,7 @@ __metaclass__ = type -from ansible_collections.ansible.netcommon.tests.unit.compat.mock import patch +from unittest.mock import patch from ansible_collections.cisco.dcnm.plugins.modules import dcnm_service_policy from .dcnm_module import TestDcnmModule, set_module_args, loadPlaybookData diff --git a/tests/unit/modules/dcnm/test_dcnm_service_route_peering.py b/tests/unit/modules/dcnm/test_dcnm_service_route_peering.py index 3b822e49b..c20338553 100644 --- a/tests/unit/modules/dcnm/test_dcnm_service_route_peering.py +++ b/tests/unit/modules/dcnm/test_dcnm_service_route_peering.py @@ -17,7 +17,7 @@ __metaclass__ = type -from ansible_collections.ansible.netcommon.tests.unit.compat.mock import patch +from unittest.mock import patch from ansible_collections.cisco.dcnm.plugins.modules import dcnm_service_route_peering from .dcnm_module import TestDcnmModule, set_module_args, loadPlaybookData diff --git a/tests/unit/modules/dcnm/test_dcnm_template.py b/tests/unit/modules/dcnm/test_dcnm_template.py index d98e42aaa..e999667b7 100644 --- a/tests/unit/modules/dcnm/test_dcnm_template.py +++ b/tests/unit/modules/dcnm/test_dcnm_template.py @@ -17,7 +17,7 @@ __metaclass__ = type -from ansible_collections.ansible.netcommon.tests.unit.compat.mock import patch +from unittest.mock import patch from ansible_collections.cisco.dcnm.plugins.modules import dcnm_template from .dcnm_module import TestDcnmModule, set_module_args, loadPlaybookData diff --git a/tests/unit/modules/dcnm/test_dcnm_vrf.py b/tests/unit/modules/dcnm/test_dcnm_vrf.py index 
a83ed7bc7..71edb5507 100644 --- a/tests/unit/modules/dcnm/test_dcnm_vrf.py +++ b/tests/unit/modules/dcnm/test_dcnm_vrf.py @@ -17,7 +17,7 @@ __metaclass__ = type -from ansible_collections.ansible.netcommon.tests.unit.compat.mock import patch +from unittest.mock import patch # from units.compat.mock import patch From 3db4c9ec877cd7996b6c40d6b7d82eda2a592f00 Mon Sep 17 00:00:00 2001 From: praveenramoorthy <62758226+praveenramoorthy@users.noreply.github.com> Date: Tue, 10 May 2022 19:18:18 +0530 Subject: [PATCH 09/17] Fix for DCNM network attach in vpc paired switches (#148) * Fix for attaching ports to vpc switches * Fix for attaching ports to vpc switches * Fix for attaching ports to vpc switches * Fix for attaching ports to vpc switches * Fix for attaching ports to vpc switches --- plugins/modules/dcnm_network.py | 61 +++++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/plugins/modules/dcnm_network.py b/plugins/modules/dcnm_network.py index d747bff6a..d191c2aef 100644 --- a/plugins/modules/dcnm_network.py +++ b/plugins/modules/dcnm_network.py @@ -574,6 +574,29 @@ def diff_for_attach_deploy(self, want_a, have_a, replace=False): del want["isAttached"] attach_list.append(want) + for attach in attach_list: + for ip, ser in self.ip_sn.items(): + if ser == attach["serialNumber"]: + ip_addr = ip + break + is_vpc = self.inventory_data[ip_addr].get("isVpcConfigured") + if is_vpc is True: + peer_found = False + peer_ser = self.inventory_data[ip_addr].get( + "peerSerialNumber" + ) + for attch in attach_list: + if peer_ser == attch["serialNumber"]: + peer_found = True + if not peer_found: + for hav in have_a: + if hav["serialNumber"] == peer_ser: + havtoattach = copy.deepcopy(hav) + havtoattach.update({"switchPorts": ""}) + del havtoattach["isAttached"] + attach_list.append(havtoattach) + break + return attach_list, dep_net def update_attach_params(self, attach, net_name, deploy): @@ -1154,6 +1177,44 @@ def get_want(self): 
self.update_attach_params(attach, net["net_name"], deploy) ) if networks: + for attch in net["attach"]: + for ip, ser in self.ip_sn.items(): + if ser == attch["serialNumber"]: + ip_address = ip + break + # deploy = attch["deployment"] + is_vpc = self.inventory_data[ip_address].get( + "isVpcConfigured" + ) + if is_vpc is True: + peer_found = False + peer_ser = self.inventory_data[ip_address].get( + "peerSerialNumber" + ) + for network in networks: + if peer_ser == network["serialNumber"]: + peer_found = True + break + if not peer_found: + msg = ( + "Switch {0} in fabric {1} is configured for vPC, " + "please attach the peer switch also to network" + .format(ip_address, self.fabric)) + self.module.fail_json(msg=msg) + # This code add the peer switch in vpc cases automatically + # As of now UI return error in such cases. Uncomment this if + # UI behaviour changes + # attach_dict = dict(ip_address="", ports=[], deploy=True) + # for ip, ser in self.ip_sn.items(): + # if ser == peer_ser: + # ip_addr = ip + # break + # attach_dict.update({"ip_address": ip_addr}) + # networks.append( + # self.update_attach_params( + # attach_dict, net["net_name"], deploy + # ) + # ) net_attach.update({"networkName": net["net_name"]}) net_attach.update({"lanAttachList": networks}) want_attach.append(net_attach) From 014a4e41e1b03a5c6e1067bf258de5a63deb336a Mon Sep 17 00:00:00 2001 From: Mike Wiebe Date: Tue, 28 Jun 2022 13:58:52 -0400 Subject: [PATCH 10/17] Remove import skip for ansible-test (#150) * Remove import skip for ansible-test * Constrict ansible base version matrix Testing to see if version 2.12.7 passes all tests * Expand matrix to include ansible version 2.12.7 * Update ansible version to 2.11.12 for sanity Update from version 2.11.8 to 2.11.12 for ansible sanity tests --- .github/workflows/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 8a40d7f2c..b05b99d52 100644 --- 
a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -51,7 +51,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - ansible: [2.9.27, 2.10.17, 2.11.8, 2.12.2] + ansible: [2.9.27, 2.10.17, 2.11.12, 2.12.7] steps: - name: Set up Python 3.8 uses: actions/setup-python@v1 @@ -71,7 +71,7 @@ jobs: run: ansible-galaxy collection install .cache/collection-tarballs/*.tar.gz - name: Run sanity tests - run: ansible-test sanity --docker --python 3.8 -v --color --truncate 0 --skip-test import + run: ansible-test sanity --docker --python 3.8 -v --color --truncate 0 working-directory: /home/runner/.ansible/collections/ansible_collections/cisco/dcnm From 56a9eafbace096f9185e04fbe7816dfa7f959cbb Mon Sep 17 00:00:00 2001 From: Mike Wiebe Date: Tue, 28 Jun 2022 14:33:35 -0400 Subject: [PATCH 11/17] Update ansible versions Update from version 2.11.8 and 2.11.2 to 2.11.12 and 2.12.7 for all CI stages --- .github/workflows/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index b05b99d52..3ecfefc6b 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -21,7 +21,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - ansible: [2.9.27, 2.10.17, 2.11.8, 2.12.2] + ansible: [2.9.27, 2.10.17, 2.11.12, 2.12.7] steps: - name: Check out code uses: actions/checkout@v2 @@ -82,7 +82,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - ansible: [2.9.27, 2.10.17, 2.11.8, 2.12.2] + ansible: [2.9.27, 2.10.17, 2.11.12, 2.12.7] steps: - name: Set up Python 3.8 uses: actions/setup-python@v1 From 4a7a75f098d7ae7ff1a8458addb10fdd385b29d9 Mon Sep 17 00:00:00 2001 From: praveenramoorthy <62758226+praveenramoorthy@users.noreply.github.com> Date: Mon, 18 Jul 2022 21:11:15 +0530 Subject: [PATCH 12/17] DCNM api changes in modules and other changes (#152) * DCNM api changes in modules and other changes * DCNM api changes in modules and other changes * DCNM api changes in modules and other changes * 
DCNM api changes in modules and other changes * DCNM api changes in modules and other changes * DCNM api changes in modules and other changes --- docs/cisco.dcnm.dcnm_interface_module.rst | 8 +- docs/cisco.dcnm.dcnm_network_module.rst | 5 ++ docs/cisco.dcnm.dcnm_vrf_module.rst | 5 ++ plugins/module_utils/network/dcnm/dcnm.py | 4 +- plugins/modules/dcnm_interface.py | 69 +++++++++----- plugins/modules/dcnm_network.py | 90 +++++++++++-------- plugins/modules/dcnm_policy.py | 14 +-- plugins/modules/dcnm_service_route_peering.py | 10 +++ plugins/modules/dcnm_template.py | 4 +- plugins/modules/dcnm_vrf.py | 34 ++++--- .../dcnm/dcnm_intf_multi_intf_merge.yaml | 16 ++-- .../dcnm/dcnm_intf_no_optional_elems.yaml | 6 +- .../dcnm_network/tests/dcnm/deleted.yaml | 18 ++-- .../dcnm_network/tests/dcnm/merged.yaml | 6 +- .../dcnm_network/tests/dcnm/overridden.yaml | 10 +-- .../dcnm_network/tests/dcnm/query.yaml | 10 +-- .../dcnm_network/tests/dcnm/replaced.yaml | 22 ++--- .../dcnm_service_route_peering_replace.yaml | 38 ++++---- 18 files changed, 214 insertions(+), 155 deletions(-) diff --git a/docs/cisco.dcnm.dcnm_interface_module.rst b/docs/cisco.dcnm.dcnm_interface_module.rst index 6cf3e14e4..4ff779a9c 100644 --- a/docs/cisco.dcnm.dcnm_interface_module.rst +++ b/docs/cisco.dcnm.dcnm_interface_module.rst @@ -379,13 +379,11 @@ Parameters -
    Choices: -
  • default
  • -
  • jumbo ←
  • -
-
Interface MTU
+
Interface MTU.
+
Can be specified either "default" or "jumbo" for access and trunk interface types. If not specified, it defaults to "jumbo"
+
Can be specified with any value within 576 and 9216 for routed interface types. If not specified, it defaults to 9216
diff --git a/docs/cisco.dcnm.dcnm_network_module.rst b/docs/cisco.dcnm.dcnm_network_module.rst index dc1cfa495..8a3e2d616 100644 --- a/docs/cisco.dcnm.dcnm_network_module.rst +++ b/docs/cisco.dcnm.dcnm_network_module.rst @@ -107,6 +107,7 @@ Parameters
Per switch knob to control whether to deploy the attachment
+
This knob has been deprecated from Ansible NDFC Collection Version 2.1.0 onwards. There will not be any functional impact if specified in playbook.
@@ -164,6 +165,10 @@ Parameters
Global knob to control whether to deploy the attachment
+
Ansible NDFC Collection Behavior for Version 2.0.1 and earlier
+
This knob will create and deploy the attachment in DCNM only when set to "True" in playbook
+
Ansible NDFC Collection Behavior for Version 2.1.0 and later
+
Attachments specified in the playbook will always be created in DCNM. This knob, when set to "True", will deploy the attachment in DCNM, by pushing the configs to switch. If set to "False", the attachments will be created in DCNM, but will not be deployed
diff --git a/docs/cisco.dcnm.dcnm_vrf_module.rst b/docs/cisco.dcnm.dcnm_vrf_module.rst index 67ee07725..25590ebbc 100644 --- a/docs/cisco.dcnm.dcnm_vrf_module.rst +++ b/docs/cisco.dcnm.dcnm_vrf_module.rst @@ -87,6 +87,7 @@ Parameters
Per switch knob to control whether to deploy the attachment
+
This knob has been deprecated from Ansible NDFC Collection Version 2.1.0 onwards. There will not be any functional impact if specified in playbook.
@@ -281,6 +282,10 @@ Parameters
Global knob to control whether to deploy the attachment
+
Ansible NDFC Collection Behavior for Version 2.0.1 and earlier
+
This knob will create and deploy the attachment in DCNM only when set to "True" in playbook
+
Ansible NDFC Collection Behavior for Version 2.1.0 and later
+
Attachments specified in the playbook will always be created in DCNM. This knob, when set to "True", will deploy the attachment in DCNM, by pushing the configs to switch. If set to "False", the attachments will be created in DCNM, but will not be deployed
diff --git a/plugins/module_utils/network/dcnm/dcnm.py b/plugins/module_utils/network/dcnm/dcnm.py index a218b436f..73ffcbed0 100644 --- a/plugins/module_utils/network/dcnm/dcnm.py +++ b/plugins/module_utils/network/dcnm/dcnm.py @@ -157,7 +157,7 @@ def get_fabric_inventory_details(module, fabric): conn = Connection(module._socket_path) if conn.get_version() == 12: - path = "/appcenter/cisco/ndfc/v1/lan-fabric" + path + path = "/appcenter/cisco/ndfc/api/v1/lan-fabric" + path count = 1 while rc is False: @@ -326,7 +326,7 @@ def get_fabric_details(module, fabric): conn = Connection(module._socket_path) if conn.get_version() == 12: - path = "/appcenter/cisco/ndfc/v1/lan-fabric" + path + path = "/appcenter/cisco/ndfc/api/v1/lan-fabric" + path count = 1 while rc is False: diff --git a/plugins/modules/dcnm_interface.py b/plugins/modules/dcnm_interface.py index a2f7416ba..70be35372 100644 --- a/plugins/modules/dcnm_interface.py +++ b/plugins/modules/dcnm_interface.py @@ -401,10 +401,12 @@ default: true mtu: description: - - Interface MTU + - Interface MTU. + - Can be specified either "default" or "jumbo" for access and + trunk interface types. If not specified, it defaults to "jumbo" + - Can be specified with any value within 576 and 9216 for routed interface + types. If not specified, it defaults to 9216 type: str - choices: ['default', 'jumbo'] - default: jumbo allowed_vlans: description: - Vlans that are allowed on this interface. 
@@ -1017,14 +1019,16 @@ class DcnmIntf: "GLOBAL_IF": "/rest/globalInterface", "GLOBAL_IF_DEPLOY": "/rest/globalInterface/deploy", "INTERFACE": "/rest/interface", + "IF_MARK_DELETE": "/rest/globalInterface", }, 12: { - "VPC_SNO": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/interface/vpcpair_serial_number?serial_number={}", - "IF_WITH_SNO_IFNAME": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/interface?serialNumber={}&ifName={}", - "IF_DETAIL_WITH_SNO": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/interface/detail?serialNumber={}", - "GLOBAL_IF": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/globalInterface", - "GLOBAL_IF_DEPLOY": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/globalInterface/deploy", - "INTERFACE": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/interface", + "VPC_SNO": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/interface/vpcpair_serial_number?serial_number={}", + "IF_WITH_SNO_IFNAME": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/interface?serialNumber={}&ifName={}", + "IF_DETAIL_WITH_SNO": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/interface/detail?serialNumber={}", + "GLOBAL_IF": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/globalInterface", + "GLOBAL_IF_DEPLOY": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/globalInterface/deploy", + "INTERFACE": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/interface", + "IF_MARK_DELETE": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/interface/markdelete", }, } @@ -1464,7 +1468,7 @@ def dcnm_intf_validate_ethernet_interface_input(self, cfg): mode=dict(required=True, type="str"), bpdu_guard=dict(type="str", default="true"), port_type_fast=dict(type="bool", default=True), - mtu=dict(type="str", default="jumbo"), + mtu=dict(type="str", default="jumbo", choices=["jumbo", "default"]), speed=dict(type="str", default="Auto"), allowed_vlans=dict(type="str", default="none"), cmds=dict(type="list"), @@ -1476,7 +1480,7 @@ def dcnm_intf_validate_ethernet_interface_input(self, cfg): mode=dict(required=True, type="str"), 
bpdu_guard=dict(type="str", default="true"), port_type_fast=dict(type="bool", default=True), - mtu=dict(type="str", default="jumbo"), + mtu=dict(type="str", default="jumbo", choices=["jumbo", "default"]), speed=dict(type="str", default="Auto"), access_vlan=dict(type="str", default=""), cmds=dict(type="list"), @@ -2339,7 +2343,7 @@ def dcnm_intf_compare_want_and_have(self, state): break if res == "copy_and_add": want[k][0][ik][nk] = d[k][0][ik][nk] - changed_dict[k][0][ik][nk] = d[k][0][ik][nk] + continue if (res == "merge_and_add"): want[k][0][ik][nk] = self.dcnm_intf_merge_want_and_have(nk, want[k][0][ik][nk], d[k][0][ik][nk]) changed_dict[k][0][ik][nk] = want[k][0][ik][nk] @@ -2366,7 +2370,7 @@ def dcnm_intf_compare_want_and_have(self, state): break if res == "copy_and_add": want[k][0][ik] = d[k][0][ik] - changed_dict[k][0][ik] = d[k][0][ik] + continue if (res == "merge_and_add"): want[k][0][ik] = self.dcnm_intf_merge_want_and_have(ik, want[k][0][ik], d[k][0][ik]) changed_dict[k][0][ik] = want[k][0][ik] @@ -2383,7 +2387,7 @@ def dcnm_intf_compare_want_and_have(self, state): if res == "copy_and_add": want[k] = d[k] - changed_dict[k] = d[k] + continue if (res == "merge_and_add"): want[k] = self.dcnm_intf_merge_want_and_have(k, want[k], d[k]) changed_dict[k] = want[k] @@ -3117,7 +3121,7 @@ def dcnm_intf_send_message_to_dcnm(self): deploy = False replace = False - path = self.paths["GLOBAL_IF"] + path = self.paths["IF_MARK_DELETE"] # First send deletes and then try create and update. This is because during override, the overriding # config may conflict with existing configuration. 
@@ -3127,7 +3131,20 @@ def dcnm_intf_send_message_to_dcnm(self): if delem == []: continue - json_payload = json.dumps(delem) + if (self.dcnm_version < 12): + json_payload = json.dumps(delem) + else: + send_payload = copy.deepcopy(delem) + [ + [ + item.pop("interfaceType"), + item.pop("fabricName"), + item.pop("interfaceDbId"), + ] + for item in send_payload + ] + json_payload = json.dumps(send_payload) + resp = dcnm_send(self.module, "DELETE", path, json_payload) if resp.get("RETURN_CODE") != 200: @@ -3178,14 +3195,22 @@ def dcnm_intf_send_message_to_dcnm(self): if index != self.int_index["INTERFACE_VPC"]: # Deploy just requires ifName and serialNumber - [ + if (self.dcnm_version < 12): [ - item.pop("interfaceType"), - item.pop("fabricName"), - item.pop("interfaceDbId"), + [ + item.pop("interfaceType"), + item.pop("fabricName"), + item.pop("interfaceDbId"), + ] + for item in delem + ] + else: + [ + [ + item.pop("interfaceDbId"), + ] + for item in delem ] - for item in delem - ] else: [ [item.pop("interfaceType"), item.pop("interfaceDbId")] diff --git a/plugins/modules/dcnm_network.py b/plugins/modules/dcnm_network.py index d191c2aef..05ef1fa29 100644 --- a/plugins/modules/dcnm_network.py +++ b/plugins/modules/dcnm_network.py @@ -181,11 +181,19 @@ deploy: description: - Per switch knob to control whether to deploy the attachment + - This knob has been deprecated from Ansible NDFC Collection Version 2.1.0 onwards. + There will not be any functional impact if specified in playbook. type: bool default: true deploy: description: - Global knob to control whether to deploy the attachment + - Ansible NDFC Collection Behavior for Version 2.0.1 and earlier + - This knob will create and deploy the attachment in DCNM only when set to "True" in playbook + - Ansible NDFC Collection Behavior for Version 2.1.0 and later + - Attachments specified in the playbook will always be created in DCNM. 
+ This knob, when set to "True", will deploy the attachment in DCNM, by pushing the configs to switch. + If set to "False", the attachments will be created in DCNM, but will not be deployed type: bool default: true """ @@ -408,12 +416,12 @@ class DcnmNetwork: "GET_VLAN": "/rest/resource-manager/vlan/{}?vlanUsageType=TOP_DOWN_VRF_VLAN", }, 12: { - "GET_VRF": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/vrfs", - "GET_VRF_NET": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/networks?vrf-name={}", - "GET_NET_ATTACH": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/networks/attachments?network-names={}", + "GET_VRF": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/top-down/fabrics/{}/vrfs", + "GET_VRF_NET": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/top-down/fabrics/{}/networks?vrf-name={}", + "GET_NET_ATTACH": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/top-down/fabrics/{}/networks/attachments?network-names={}", "GET_NET_ID": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/top-down/fabrics/{}/netinfo", - "GET_NET": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/networks", - "GET_NET_NAME": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/networks/{}", + "GET_NET": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/top-down/fabrics/{}/networks", + "GET_NET_NAME": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/top-down/fabrics/{}/networks/{}", "GET_VLAN": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/vlan/{}?vlanUsageType=TOP_DOWN_VRF_VLAN", }, } @@ -570,7 +578,7 @@ def diff_for_attach_deploy(self, want_a, have_a, replace=False): dep_net = True if not found: - if bool(want["deployment"]): + if bool(want["isAttached"]): del want["isAttached"] attach_list.append(want) @@ -637,7 +645,7 @@ def update_attach_params(self, attach, net_name, deploy): attach.update({"dot1QVlan": 0}) attach.update({"untagged": False}) attach.update({"deployment": deploy}) - attach.update({"isAttached": deploy}) + 
attach.update({"isAttached": True}) attach.update({"extensionValues": ""}) attach.update({"instanceValues": ""}) attach.update({"freeformConfig": ""}) @@ -1041,32 +1049,33 @@ def get_have(self): if networks_per_navrf.get("DATA"): for l2net in networks_per_navrf["DATA"]: json_to_dict = json.loads(l2net["networkTemplateConfig"]) - t_conf = { - "vlanId": json_to_dict.get("vlanId", ""), - "gatewayIpAddress": json_to_dict.get("gatewayIpAddress", ""), - "isLayer2Only": json_to_dict.get("isLayer2Only", False), - "tag": json_to_dict.get("tag", ""), - "vlanName": json_to_dict.get("vlanName", ""), - "intfDescription": json_to_dict.get("intfDescription", ""), - "mtu": json_to_dict.get("mtu", ""), - "suppressArp": json_to_dict.get("suppressArp", False), - "dhcpServerAddr1": json_to_dict.get("dhcpServerAddr1", ""), - "dhcpServerAddr2": json_to_dict.get("dhcpServerAddr2", ""), - "dhcpServerAddr3": json_to_dict.get("dhcpServerAddr3", ""), - "vrfDhcp": json_to_dict.get("vrfDhcp", ""), - "vrfDhcp2": json_to_dict.get("vrfDhcp2", ""), - "vrfDhcp3": json_to_dict.get("vrfDhcp3", ""), - "loopbackId": json_to_dict.get("loopbackId", ""), - } - - l2net.update({"networkTemplateConfig": json.dumps(t_conf)}) - del l2net["displayName"] - del l2net["serviceNetworkTemplate"] - del l2net["source"] - - curr_networks.append(l2net["networkName"]) - - have_create.append(l2net) + if (json_to_dict.get("vrfName", "")) == "NA": + t_conf = { + "vlanId": json_to_dict.get("vlanId", ""), + "gatewayIpAddress": json_to_dict.get("gatewayIpAddress", ""), + "isLayer2Only": json_to_dict.get("isLayer2Only", False), + "tag": json_to_dict.get("tag", ""), + "vlanName": json_to_dict.get("vlanName", ""), + "intfDescription": json_to_dict.get("intfDescription", ""), + "mtu": json_to_dict.get("mtu", ""), + "suppressArp": json_to_dict.get("suppressArp", False), + "dhcpServerAddr1": json_to_dict.get("dhcpServerAddr1", ""), + "dhcpServerAddr2": json_to_dict.get("dhcpServerAddr2", ""), + "dhcpServerAddr3": 
json_to_dict.get("dhcpServerAddr3", ""), + "vrfDhcp": json_to_dict.get("vrfDhcp", ""), + "vrfDhcp2": json_to_dict.get("vrfDhcp2", ""), + "vrfDhcp3": json_to_dict.get("vrfDhcp3", ""), + "loopbackId": json_to_dict.get("loopbackId", ""), + } + + l2net.update({"networkTemplateConfig": json.dumps(t_conf)}) + del l2net["displayName"] + del l2net["serviceNetworkTemplate"] + del l2net["source"] + + curr_networks.append(l2net["networkName"]) + + have_create.append(l2net) if not curr_networks: return @@ -1090,13 +1099,16 @@ def get_have(self): for attach in attach_list: attach_state = False if attach["lanAttachState"] == "NA" else True deploy = attach["isLanAttached"] + deployed = False if bool(deploy) and ( attach["lanAttachState"] == "OUT-OF-SYNC" or attach["lanAttachState"] == "PENDING" ): - deploy = False + deployed = False + else: + deployed = True - if bool(deploy): + if bool(deployed): dep_net = attach["networkName"] sn = attach["switchSerialNo"] @@ -1128,7 +1140,7 @@ def get_have(self): attach.update({"fabric": self.fabric}) attach.update({"vlan": vlan}) attach.update({"serialNumber": sn}) - attach.update({"deployment": deploy}) + attach.update({"deployment": deployed}) attach.update({"extensionValues": ""}) attach.update({"instanceValues": ""}) attach.update({"freeformConfig": ""}) @@ -1643,14 +1655,14 @@ def get_diff_merge(self, replace=False): atch_list = [] for attach in want_a["lanAttachList"]: del attach["isAttached"] - if bool(attach["deployment"]): - atch_list.append(attach) + atch_list.append(attach) if atch_list: base = want_a.copy() del base["lanAttachList"] base.update({"lanAttachList": atch_list}) diff_attach.append(base) - dep_net = want_a["networkName"] + if bool(attach["deployment"]): + dep_net = want_a["networkName"] if dep_net: all_nets.append(dep_net) diff --git a/plugins/modules/dcnm_policy.py b/plugins/modules/dcnm_policy.py index 534b64f03..4ceccc125 100644 --- a/plugins/modules/dcnm_policy.py +++ b/plugins/modules/dcnm_policy.py @@ -368,13 
+368,13 @@ class DcnmPolicy: "CONFIG_PREVIEW": "/rest/control/fabrics/{}/config-preview?forceShowRun=false&showBrief=true", }, 12: { - "POLICY_WITH_ID": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/{}", - "POLICY_GET_SWITCHES": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/switches?serialNumber={}", - "POLICY_BULK_CREATE": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/bulk-create", - "POLICY_MARK_DELETE": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/{}/mark-delete", - "POLICY_DEPLOY": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/deploy", - "POLICY_CFG_DEPLOY": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/fabrics/{}/config-deploy/", - "POLICY_WITH_POLICY_ID": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/{}", + "POLICY_WITH_ID": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/policies/{}", + "POLICY_GET_SWITCHES": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/policies/switches?serialNumber={}", + "POLICY_BULK_CREATE": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/policies/bulk-create", + "POLICY_MARK_DELETE": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/policies/{}/mark-delete", + "POLICY_DEPLOY": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/policies/deploy", + "POLICY_CFG_DEPLOY": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/fabrics/{}/config-deploy/", + "POLICY_WITH_POLICY_ID": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/policies/{}", "CONFIG_PREVIEW": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/fabrics/{}/config-preview?forceShowRun=false&showBrief=true", }, } diff --git a/plugins/modules/dcnm_service_route_peering.py b/plugins/modules/dcnm_service_route_peering.py index 73ff6b13e..a250fe9fa 100644 --- a/plugins/modules/dcnm_service_route_peering.py +++ b/plugins/modules/dcnm_service_route_peering.py @@ -4140,6 +4140,11 @@ def dcnm_srp_check_for_errors_in_resp(self, resp): resp["VLANS"] = re.findall( r"\d+", 
resp["DATA"]["error"].get("detail", "") ) + if "is already Detached state and no action taken" in resp["DATA"]["error"].get( + "detail", "" + ): + # This case can be considered as no error, because detaching an already detahced SRP may return this code + rc = "no_error" return rc def dcnm_srp_get_deployed_srp_list(self, diff_deploy): @@ -4368,6 +4373,10 @@ def dcnm_srp_send_message_to_dcnm(self): rc = self.dcnm_srp_check_for_errors_in_resp(resp) resp["METHOD"] = "DELETE" + if rc == "no_error": + resp = None + break + if rc == "in_use_error": # We may see this if SRPs use a vlan id already in use. In such a case delete the SRP directly # Mark this element for "no deploy" so that it is deleted directly without deploy @@ -4376,6 +4385,7 @@ def dcnm_srp_send_message_to_dcnm(self): break time.sleep(10) continue + if resp is not None: resp["RETRIES"] = retries self.result["response"].append(resp) diff --git a/plugins/modules/dcnm_template.py b/plugins/modules/dcnm_template.py index 059b1b8e4..66242ae75 100644 --- a/plugins/modules/dcnm_template.py +++ b/plugins/modules/dcnm_template.py @@ -197,8 +197,8 @@ class DcnmTemplate: }, 12: { "TEMP_VALIDATE": "/appcenter/cisco/ndfc/api/v1/configtemplate/rest/config/templates/validate", - "TEMP_GET_SWITCHES": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/policies/switches?serialNumber={}", - "TEMP_GET_SW_ROLES": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/control/switches/roles", + "TEMP_GET_SWITCHES": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/policies/switches?serialNumber={}", + "TEMP_GET_SW_ROLES": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/switches/roles", "TEMPLATE": "/appcenter/cisco/ndfc/api/v1/configtemplate/rest/config/templates/template", "TEMP_DELETE_BULK": "/appcenter/cisco/ndfc/api/v1/configtemplate/rest/config/templates/delete/bulk", "TEMPLATE_WITH_NAME": "/appcenter/cisco/ndfc/api/v1/configtemplate/rest/config/templates/{}", diff --git a/plugins/modules/dcnm_vrf.py 
b/plugins/modules/dcnm_vrf.py index ef3add483..0b2699b1c 100644 --- a/plugins/modules/dcnm_vrf.py +++ b/plugins/modules/dcnm_vrf.py @@ -141,11 +141,19 @@ deploy: description: - Per switch knob to control whether to deploy the attachment + - This knob has been deprecated from Ansible NDFC Collection Version 2.1.0 onwards. + There will not be any functional impact if specified in playbook. type: bool default: true deploy: description: - Global knob to control whether to deploy the attachment + - Ansible NDFC Collection Behavior for Version 2.0.1 and earlier + - This knob will create and deploy the attachment in DCNM only when set to "True" in playbook + - Ansible NDFC Collection Behavior for Version 2.1.0 and later + - Attachments specified in the playbook will always be created in DCNM. + This knob, when set to "True", will deploy the attachment in DCNM, by pushing the configs to switch. + If set to "False", the attachments will be created in DCNM, but will not be deployed type: bool default: true """ @@ -372,9 +380,9 @@ class DcnmVrf: "GET_VLAN": "/rest/resource-manager/vlan/{}?vlanUsageType=TOP_DOWN_VRF_VLAN", }, 12: { - "GET_VRF": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/vrfs", - "GET_VRF_ATTACH": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/vrfs/attachments?vrf-names={}", - "GET_VRF_SWITCH": "/appcenter/cisco/ndfc/v1/lan-fabric/rest/top-down/fabrics/{}/vrfs/switches?vrf-names={}&serial-numbers={}", + "GET_VRF": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/top-down/fabrics/{}/vrfs", + "GET_VRF_ATTACH": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/top-down/fabrics/{}/vrfs/attachments?vrf-names={}", + "GET_VRF_SWITCH": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/top-down/fabrics/{}/vrfs/switches?vrf-names={}&serial-numbers={}", "GET_VRF_ID": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/top-down/fabrics/{}/vrfinfo", "GET_VLAN": 
"/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/resource-manager/vlan/{}?vlanUsageType=TOP_DOWN_VRF_VLAN", }, @@ -520,7 +528,7 @@ def diff_for_attach_deploy(self, want_a, have_a): dep_vrf = True if not found: - if bool(want["deployment"]): + if bool(want["isAttached"]): del want["isAttached"] attach_list.append(want) @@ -612,7 +620,7 @@ def update_attach_params(self, attach, vrf_name, deploy, vlanId): attach.update({"vrfName": vrf_name}) attach.update({"vlan": vlanId}) attach.update({"deployment": deploy}) - attach.update({"isAttached": deploy}) + attach.update({"isAttached": True}) attach.update({"serialNumber": serial}) if self.vrf_ext: attach.update({"extensionValues": json.dumps(ext_values).replace(" ", "")}) @@ -783,13 +791,16 @@ def get_have(self): for attach in attach_list: attach_state = False if attach["lanAttachState"] == "NA" else True deploy = attach["isLanAttached"] + deployed = False if bool(deploy) and ( attach["lanAttachState"] == "OUT-OF-SYNC" or attach["lanAttachState"] == "PENDING" ): - deploy = False + deployed = False + else: + deployed = True - if bool(deploy): + if bool(deployed): dep_vrf = attach["vrfName"] sn = attach["switchSerialNo"] @@ -813,7 +824,7 @@ def get_have(self): attach.update({"fabric": self.fabric}) attach.update({"vlan": vlan}) attach.update({"serialNumber": sn}) - attach.update({"deployment": deploy}) + attach.update({"deployment": deployed}) attach.update({"extensionValues": ""}) attach.update({"instanceValues": ""}) attach.update({"freeformConfig": ""}) @@ -1267,7 +1278,6 @@ def get_diff_merge(self): diff, vrf = self.diff_for_attach_deploy( want_a["lanAttachList"], have_a["lanAttachList"] ) - if diff: base = want_a.copy() del base["lanAttachList"] @@ -1284,14 +1294,14 @@ def get_diff_merge(self): for attach in want_a["lanAttachList"]: if attach.get("isAttached"): del attach["isAttached"] - if bool(attach["deployment"]): - atch_list.append(attach) + atch_list.append(attach) if atch_list: base = want_a.copy() del 
base["lanAttachList"] base.update({"lanAttachList": atch_list}) diff_attach.append(base) - dep_vrf = want_a["vrfName"] + if bool(attach["deployment"]): + dep_vrf = want_a["vrfName"] if dep_vrf: all_vrfs += dep_vrf + "," diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_multi_intf_merge.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_multi_intf_merge.yaml index 4ce05666f..d74a2397f 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_multi_intf_merge.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_multi_intf_merge.yaml @@ -132,7 +132,7 @@ ipv6_addr: fd08::0201 # ipV6 address for the loopback interface route_tag: "" # Routing Tag for the interface cmds: # Freeform config - - spanning-tree bpduguard enable + - logging event port link-status description: "loopback interface 100 configuration" - name: "{{ ansible_sub_intf1 }}" # should be of the form eth. @@ -151,7 +151,7 @@ ipv6_mask_len: 64 # choose between [min:64, max:127] mtu: 9216 # choose between [min:576, max:9216] cmds: # Freeform config - - spanning-tree bpduguard enable + - logging event port link-status description: "sub interface eth1/1.1 configuration" register: result @@ -255,18 +255,12 @@ - assert: that: - - '(result["diff"][0]["merged"] | length) == 1' + - '(result["diff"][0]["merged"] | length) == 0' - '(result["diff"][0]["deleted"] | length) == 0' - '(result["diff"][0]["replaced"] | length) == 0' - '(result["diff"][0]["overridden"] | length) == 0' - - '(result["diff"][0]["deploy"] | length) == 1' - - '(result["diff"][0]["merged"][0]["interfaces"][0]["nvPairs"]["CONF"].split("\n") | length) == 1' - - '(result["diff"][0]["merged"][0]["interfaces"][0]["nvPairs"]["MEMBER_INTERFACES"].split(",") | length) == 1' - - - assert: - that: - - 'item["RETURN_CODE"] == 200' - loop: '{{ result.response }}' + - '(result["diff"][0]["deploy"] | length) == 0' + - 'result.changed == false' - name: Modify po300 - new aggregate 
members like members and cmds cisco.dcnm.dcnm_interface: diff --git a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_no_optional_elems.yaml b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_no_optional_elems.yaml index 00de715b1..ab3aac284 100644 --- a/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_no_optional_elems.yaml +++ b/tests/integration/targets/dcnm_interface/tests/dcnm/dcnm_intf_no_optional_elems.yaml @@ -126,11 +126,11 @@ - assert: that: - 'result.changed == true' - - '(result["diff"][0]["merged"] | length) == 11' + - '(result["diff"][0]["merged"] | length) == 10' - '(result["diff"][0]["deleted"] | length) == 0' - '(result["diff"][0]["replaced"] | length) == 0' - '(result["diff"][0]["overridden"] | length) == 0' - - '(result["diff"][0]["deploy"] | length) == 9' + - '(result["diff"][0]["deploy"] | length) == 8' - assert: that: @@ -273,4 +273,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined \ No newline at end of file + when: IT_CONTEXT is not defined diff --git a/tests/integration/targets/dcnm_network/tests/dcnm/deleted.yaml b/tests/integration/targets/dcnm_network/tests/dcnm/deleted.yaml index e3854a1e3..9abf2ccbc 100644 --- a/tests/integration/targets/dcnm_network/tests/dcnm/deleted.yaml +++ b/tests/integration/targets/dcnm_network/tests/dcnm/deleted.yaml @@ -102,8 +102,8 @@ - '(result.response[0].DATA|dict2items)[1].value == "SUCCESS"' - 'result.diff[0].attach[0].deploy == false' - 'result.diff[0].attach[1].deploy == false' - - '"{{ ansible_switch1 }}" in result.diff[0].attach[0].ip_address' - - '"{{ ansible_switch2 }}" in result.diff[0].attach[1].ip_address' + - '"{{ ansible_switch1 }}" or "{{ ansible_switch2 }}" in result.diff[0].attach[0].ip_address' + - '"{{ ansible_switch2 }}" or "{{ ansible_switch1 }}" in result.diff[0].attach[1].ip_address' - 'result.diff[0].net_name == "ansible-net13"' - name: DELETED - conf - Idempotence @@ -330,10 +330,10 @@ - 
'(result.response[0].DATA|dict2items)[1].value == "SUCCESS"' - 'result.diff[0].attach[0].deploy == false' - 'result.diff[1].attach[0].deploy == false' - - '"{{ ansible_switch1 }}" in result.diff[0].attach[0].ip_address' - - '"{{ ansible_switch2 }}" in result.diff[1].attach[0].ip_address' - - 'result.diff[0].net_name == "ansible-net13"' - - 'result.diff[1].net_name == "ansible-net12"' + - '"{{ ansible_switch2 }}" or "{{ ansible_switch1 }}" in result.diff[0].attach[0].ip_address' + - '"{{ ansible_switch2 }}" or "{{ ansible_switch1 }}" in result.diff[1].attach[0].ip_address' + - '"ansible-net13" or "ansible-net12" in result.diff[1].net_name' + - '"ansible-net13" or "ansible-net12" in result.diff[0].net_name' - name: DELETED - conf3 - Idempotence cisco.dcnm.dcnm_network: *conf3 @@ -567,8 +567,8 @@ - '(result.response[0].DATA|dict2items)[1].value == "SUCCESS"' - 'result.diff[0].attach[0].deploy == false' - 'result.diff[1].attach[0].deploy == false' - - '"{{ ansible_switch2 }}" in result.diff[0].attach[0].ip_address' - - '"{{ ansible_switch1 }}" in result.diff[1].attach[0].ip_address' + - '"{{ ansible_switch2 }}" or "{{ ansible_switch1 }}" in result.diff[0].attach[0].ip_address' + - '"{{ ansible_switch1 }}" or "{{ ansible_switch2 }}" in result.diff[1].attach[0].ip_address' - 'result.diff[0].net_name == "ansible-net12"' - 'result.diff[1].net_name == "ansible-net13"' @@ -627,4 +627,4 @@ - name: DELETED - setup - remove any networks cisco.dcnm.dcnm_network: fabric: "{{ ansible_it_fabric }}" - state: deleted \ No newline at end of file + state: deleted diff --git a/tests/integration/targets/dcnm_network/tests/dcnm/merged.yaml b/tests/integration/targets/dcnm_network/tests/dcnm/merged.yaml index e934c83bc..3f52fdaa5 100644 --- a/tests/integration/targets/dcnm_network/tests/dcnm/merged.yaml +++ b/tests/integration/targets/dcnm_network/tests/dcnm/merged.yaml @@ -708,11 +708,11 @@ - 'result.response[0].attach[0].isLanAttached== true' - 
'result.response[0].attach[0].lanAttachState== "DEPLOYED"' - 'result.response[0].attach[0].networkName== "ansible-net13"' - - '"{{ ansible_switch2 }}" in result.response[0].attach[0].ipAddress' + - '"{{ ansible_switch2 }}" or "{{ ansible_switch1 }}" in result.response[0].attach[0].ipAddress' - 'result.response[0].attach[1].isLanAttached== true' - 'result.response[0].attach[1].lanAttachState== "DEPLOYED"' - 'result.response[0].attach[1].networkName== "ansible-net13"' - - '"{{ ansible_switch1 }}" in result.response[0].attach[1].ipAddress' + - '"{{ ansible_switch1 }}" or "{{ ansible_switch2 }}" in result.response[0].attach[1].ipAddress' - name: MERGED - setup - Clean up any existing network cisco.dcnm.dcnm_network: @@ -934,4 +934,4 @@ - name: MERGED - setup - remove any networks cisco.dcnm.dcnm_network: fabric: "{{ ansible_it_fabric }}" - state: deleted \ No newline at end of file + state: deleted diff --git a/tests/integration/targets/dcnm_network/tests/dcnm/overridden.yaml b/tests/integration/targets/dcnm_network/tests/dcnm/overridden.yaml index 3522cdddc..732d7cf32 100644 --- a/tests/integration/targets/dcnm_network/tests/dcnm/overridden.yaml +++ b/tests/integration/targets/dcnm_network/tests/dcnm/overridden.yaml @@ -285,10 +285,10 @@ - 'result.diff[1].attach[0].deploy == false' - 'result.diff[2].attach[0].deploy == false' - 'result.diff[2].attach[1].deploy == false' - - '"{{ ansible_switch2 }}" in result.diff[0].attach[0].ip_address' - - '"{{ ansible_switch2 }}" in result.diff[1].attach[0].ip_address' - - '"{{ ansible_switch2 }}" in result.diff[2].attach[0].ip_address' - - '"{{ ansible_switch1 }}" in result.diff[2].attach[1].ip_address' + - '"{{ ansible_switch2 }}" or "{{ ansible_switch1 }}" in result.diff[0].attach[0].ip_address' + - '"{{ ansible_switch1 }}" or "{{ ansible_switch2 }}" in result.diff[1].attach[0].ip_address' + - '"{{ ansible_switch2 }}" or "{{ ansible_switch1 }}" in result.diff[1].attach[1].ip_address' + - '"{{ ansible_switch2 }}" or "{{ 
ansible_switch1 }}" in result.diff[2].attach[0].ip_address' - 'result.diff[0].net_name == "ansible-net14"' - 'result.diff[0].net_id == 7005' - 'result.diff[0].vrf_name == "NA"' @@ -326,4 +326,4 @@ - name: OVERRIDDEN - setup - remove any networks cisco.dcnm.dcnm_network: fabric: "{{ ansible_it_fabric }}" - state: deleted \ No newline at end of file + state: deleted diff --git a/tests/integration/targets/dcnm_network/tests/dcnm/query.yaml b/tests/integration/targets/dcnm_network/tests/dcnm/query.yaml index 91b665106..f00bf750a 100644 --- a/tests/integration/targets/dcnm_network/tests/dcnm/query.yaml +++ b/tests/integration/targets/dcnm_network/tests/dcnm/query.yaml @@ -188,10 +188,10 @@ - '(result.response[0].DATA|dict2items)[1].value == "SUCCESS"' - 'result.diff[0].attach[0].deploy == false' - 'result.diff[1].attach[0].deploy == false' - - '"{{ ansible_switch1 }}" in result.diff[0].attach[0].ip_address' - - '"{{ ansible_switch2 }}" in result.diff[1].attach[0].ip_address' - - 'result.diff[0].net_name == "ansible-net13"' - - 'result.diff[1].net_name == "ansible-net12"' + - '"{{ ansible_switch1 }}" or "{{ ansible_switch2 }}" in result.diff[0].attach[0].ip_address' + - '"{{ ansible_switch2 }}" or "{{ ansible_switch1 }}" in result.diff[1].attach[0].ip_address' + - '"ansible-net13" or "ansible-net12" in result.diff[1].net_name' + - '"ansible-net13" or "ansible-net12" in result.diff[0].net_name' - name: QUERY - sleep for 40 seconds for DCNM to completely update the state wait_for: @@ -460,4 +460,4 @@ - name: QUERY - setup - remove any networks cisco.dcnm.dcnm_network: fabric: "{{ ansible_it_fabric }}" - state: deleted \ No newline at end of file + state: deleted diff --git a/tests/integration/targets/dcnm_network/tests/dcnm/replaced.yaml b/tests/integration/targets/dcnm_network/tests/dcnm/replaced.yaml index aafafa8aa..7115c76d4 100644 --- a/tests/integration/targets/dcnm_network/tests/dcnm/replaced.yaml +++ b/tests/integration/targets/dcnm_network/tests/dcnm/replaced.yaml 
@@ -122,12 +122,12 @@ - '(result.response[0].DATA|dict2items)[2].value == "SUCCESS"' - 'result.diff[0].attach[0].deploy == false' - 'result.diff[0].attach[1].deploy == false' - - '"{{ ansible_switch1 }}" in result.diff[0].attach[0].ip_address' - - '"{{ ansible_switch2 }}" in result.diff[0].attach[1].ip_address' - - 'result.diff[0].net_name == "ansible-net13"' + - '"{{ ansible_switch1 }}" or "{{ ansible_switch2 }}" in result.diff[1].attach[0].ip_address' + - '"{{ ansible_switch2 }}" or "{{ ansible_switch1 }}" in result.diff[1].attach[1].ip_address' + - '"ansible-net13" or "ansible-net12" in result.diff[1].net_name' - 'result.diff[1].attach[0].deploy == false' - - '"{{ ansible_switch2 }}" in result.diff[1].attach[0].ip_address' - - 'result.diff[1].net_name == "ansible-net12"' + - '"{{ ansible_switch2 }}" in result.diff[0].attach[0].ip_address' + - '"ansible-net13" or "ansible-net12" in result.diff[0].net_name' - name: REPLACED - sleep for 40 seconds for DCNM to completely update the state wait_for: @@ -342,14 +342,14 @@ - '(result.response[0].DATA|dict2items)[0].value == "SUCCESS"' - '(result.response[0].DATA|dict2items)[1].value == "SUCCESS"' - '(result.response[0].DATA|dict2items)[2].value == "SUCCESS"' - - 'result.diff[0].attach[0].deploy == false' - - '"{{ ansible_switch2 }}" in result.diff[0].attach[0].ip_address' - - 'result.diff[0].net_name == "ansible-net12"' - 'result.diff[1].attach[0].deploy == false' - 'result.diff[1].attach[1].deploy == false' - - '"{{ ansible_switch2 }}" in result.diff[1].attach[0].ip_address' - - '"{{ ansible_switch1 }}" in result.diff[1].attach[1].ip_address' + - '"{{ ansible_switch2 }}" or "{{ ansible_switch1 }}" in result.diff[0].attach[0].ip_address' - 'result.diff[1].net_name == "ansible-net13"' + - 'result.diff[0].attach[0].deploy == false' + - '"{{ ansible_switch2 }}" or "{{ ansible_switch1 }}" in result.diff[1].attach[0].ip_address' + - '"{{ ansible_switch1 }}" or "{{ ansible_switch2 }}" in result.diff[1].attach[1].ip_address' + 
- 'result.diff[0].net_name == "ansible-net12"' - name: REPLACED - sleep for 40 seconds for DCNM to completely update the state wait_for: @@ -452,4 +452,4 @@ - name: REPLACED - setup - remove any networks cisco.dcnm.dcnm_network: fabric: "{{ ansible_it_fabric }}" - state: deleted \ No newline at end of file + state: deleted diff --git a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_replace.yaml b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_replace.yaml index 8c934b447..4822f9a68 100644 --- a/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_replace.yaml +++ b/tests/integration/targets/dcnm_service_route_peering/tests/dcnm/dcnm_service_route_peering_replace.yaml @@ -289,7 +289,7 @@ vlan_id: 191 # mandatory profile: ipv4_gw: 192.161.1.1/24 # mandatory - ipv6_gw: 2101:db01::1/64 # optional, default is '' + ipv6_gw: 2001:db01::1/64 # optional, default is '' vlan_name: rep-rp1-sn1-inside # optional, default is '' int_descr: "RP1 SN1 inside interface - REP" # optional, default is '' tag: 12111 # optional, default is 12345 @@ -300,7 +300,7 @@ vlan_id: 192 # mandatory profile: ipv4_gw: 192.161.2.1/24 # mandatory - ipv6_gw: 2101:db02::1/64 # optional, default is '' + ipv6_gw: 2001:db02::1/64 # optional, default is '' vlan_name: rep-rp1-sn1-outside # optional, default is '' int_descr: "RP1 SN1 outside interface- REP" # optionL, default is '' tag: 12112 # optional, default is 12345 @@ -316,7 +316,7 @@ vlan_id: 291 # mandatory profile: ipv4_gw: 192.162.1.1/24 # mandatory - ipv6_gw: 2102:db01::1/64 # optional, default is '' + ipv6_gw: 2002:db01::1/64 # optional, default is '' vlan_name: rep-rp2-sn1-inside # optional, default is '' int_descr: "RP2 SN1 inside interface - REP" # optional, default is '' static_route: # optional, default is '' @@ -331,7 +331,7 @@ vlan_id: 292 # mandatory profile: ipv4_gw: 192.162.2.1/24 # mandatory - ipv6_gw: 
2102:db02::1/64 # optional, default is '' + ipv6_gw: 2002:db02::1/64 # optional, default is '' vlan_name: rep-rp2-sn1-outside # optional, default is '' int_descr: "RP2 SN1 outside interface - REP" # optional, default is '' static_route: # optional, default is '' @@ -351,7 +351,7 @@ vlan_id: 391 # mandatory profile: ipv4_gw: 192.163.1.1/24 # mandatory - ipv6_gw: 2103:db01::1/64 # optional, default is '' + ipv6_gw: 2003:db01::1/64 # optional, default is '' vlan_name: rep-rp3-sn1-inside # optional, default is '' int_descr: "RP3 SN1 inside interface - REP" # optional, default is '' tag: 32111 # optional, default is 12345 @@ -371,7 +371,7 @@ vlan_id: 302 # mandatory profile: ipv4_gw: 192.163.2.1/24 # mandatory - ipv6_gw: 2103:db02::1/64 # optional, default is '' + ipv6_gw: 2003:db02::1/64 # optional, default is '' vlan_name: rep-rp3-sn1-outside # optional, default is '' int_descr: "RP3 SN1 outside interface - REP" # optional, default is '' tag: 32112 # optional, default is 12345 @@ -554,7 +554,7 @@ vlan_id: 191 # mandatory profile: ipv4_gw: 192.161.1.1/24 # mandatory - ipv6_gw: 2101:db01::1/64 # optional, default is '' + ipv6_gw: 2001:db01::1/64 # optional, default is '' vlan_name: rep-rp1-sn1-inside # optional, default is '' int_descr: "RP1 SN1 inside interface - REP" # optional, default is '' tag: 12111 # optional, default is 12345 @@ -565,7 +565,7 @@ vlan_id: 192 # mandatory profile: ipv4_gw: 192.161.2.1/24 # mandatory - ipv6_gw: 2101:db02::1/64 # optional, default is '' + ipv6_gw: 2001:db02::1/64 # optional, default is '' vlan_name: rep-rp1-sn1-outside # optional, default is '' int_descr: "RP1 SN1 outside interface- REP" # optionL, default is '' tag: 12112 # optional, default is 12345 @@ -582,7 +582,7 @@ vlan_id: 291 # mandatory profile: ipv4_gw: 192.162.1.1/24 # mandatory - ipv6_gw: 2102:db01::1/64 # optional, default is '' + ipv6_gw: 2002:db01::1/64 # optional, default is '' vlan_name: rep-rp2-sn1-inside # optional, default is '' int_descr: "RP2 SN1 inside 
interface - REP" # optional, default is '' static_route: # optional, default is '' @@ -597,7 +597,7 @@ vlan_id: 292 # mandatory profile: ipv4_gw: 192.162.2.1/24 # mandatory - ipv6_gw: 2102:db02::1/64 # optional, default is '' + ipv6_gw: 2002:db02::1/64 # optional, default is '' vlan_name: rep-rp2-sn1-outside # optional, default is '' int_descr: "RP2 SN1 outside interface - REP" # optional, default is '' static_route: # optional, default is '' @@ -617,7 +617,7 @@ vlan_id: 391 # mandatory profile: ipv4_gw: 192.163.1.1/24 # mandatory - ipv6_gw: 2103:db01::1/64 # optional, default is '' + ipv6_gw: 2003:db01::1/64 # optional, default is '' vlan_name: rep-rp3-sn1-inside # optional, default is '' int_descr: "RP3 SN1 inside interface - REP" # optional, default is '' tag: 32111 # optional, default is 12345 @@ -637,7 +637,7 @@ vlan_id: 302 # mandatory profile: ipv4_gw: 192.163.2.1/24 # mandatory - ipv6_gw: 2103:db02::1/64 # optional, default is '' + ipv6_gw: 2003:db02::1/64 # optional, default is '' vlan_name: rep-rp3-sn1-outside # optional, default is '' int_descr: "RP3 SN1 outside interface - REP" # optional, default is '' tag: 32112 # optional, default is 12345 @@ -662,7 +662,7 @@ vlan_id: 491 # mandatory profile: ipv4_gw: 192.164.1.1/24 # mandatory - ipv6_gw: 2104:db01::1/64 # optional, default is '' + ipv6_gw: 2004:db01::1/64 # optional, default is '' vlan_name: rep-rp4-sn2-first-arm # optional, default is '' int_descr: "RP4 SN2 first arm intf - REP" # optional, default is '' tag: 42111 # optional, default is 12345 @@ -688,7 +688,7 @@ vlan_id: 591 # mandatory profile: ipv4_gw: 192.165.1.1/24 # mandatory - ipv6_gw: 2105:db01::1/64 # optional, default is '' + ipv6_gw: 2005:db01::1/64 # optional, default is '' vlan_name: rep-rp5-sn2-first-arm # optional, default is '' int_descr: "RP5 SN2 first arm intf - REP" # optional, default is '' tag: 52111 # optional, default is 12345 @@ -708,7 +708,7 @@ vlan_id: 592 # mandatory profile: ipv4_gw: 192.165.2.1/24 # mandatory - ipv6_gw: 
2105:db02::1/64 # optional, default is '' + ipv6_gw: 2005:db02::1/64 # optional, default is '' vlan_name: rep-rp5-sn2-second-arm # optional, default is '' int_descr: "RP5 SN2 second arm intf - REP" # optional, default is '' tag: 52112 # optional, default is 12345 @@ -724,7 +724,7 @@ vlan_id: 691 # mandatory profile: ipv4_gw: 192.166.1.1/24 # mandatory - ipv6_gw: 2106:db01::1/64 # optional, default is '' + ipv6_gw: 2006:db01::1/64 # optional, default is '' vlan_name: rep-rp6-sn2-first-arm # optional, default is '' int_descr: "RP6 SN2 first arm intf - REP" # optional, default is '' tag: 62111 # optional, default is 12345 @@ -749,7 +749,7 @@ vlan_id: 791 # mandatory profile: ipv4_gw: 192.167.1.1/24 # mandatory - ipv6_gw: 2107:db01::1/64 # optional, default is '' + ipv6_gw: 2007:db01::1/64 # optional, default is '' vlan_name: rep-rp7-sn2-first-arm # optional, default is '' int_descr: "RP6 SN2 first arm intf - REP" # optional, default is '' tag: 72111 # optional, default is 12345 @@ -764,7 +764,7 @@ vlan_id: 792 # mandatory profile: ipv4_gw: 192.167.2.1/24 # mandatory - ipv6_gw: 2107:db02::1/64 # optional, default is '' + ipv6_gw: 2007:db02::1/64 # optional, default is '' vlan_name: rep-rp7-sn2-second-arm # optional, default is '' int_descr: "RP7 SN2 second arm intf - REP" # optional, default is '' tag: 72112 # optional, default is 12345 @@ -841,4 +841,4 @@ that: - 'item["RETURN_CODE"] == 200' loop: '{{ result.response }}' - when: IT_CONTEXT is not defined \ No newline at end of file + when: IT_CONTEXT is not defined From b4ca4303bfa99dba1edad9b906f066a344d3176f Mon Sep 17 00:00:00 2001 From: Praveen Ramoorthy Date: Tue, 19 Jul 2022 19:37:21 +0530 Subject: [PATCH 13/17] Changelog Updates --- CHANGELOG.md | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 98968592a..c55af7828 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,33 @@ This project adheres to [Semantic Versioning](http://semver.org/). 
## Unreleased +## [2.1.0] - 2022-06-19 + +### Added + +* The following new modules are included in this release + * `dcnm_resource_manager` - Module for managing dcnm resources + [Reference Info](https://www.cisco.com/c/en/us/td/docs/dcn/ndfc/121x/configuration/fabric-controller/cisco-ndfc-fabric-controller-configuration-guide-121x/lan-fabrics.html#task_fsg_sn4_zqb) + +### Fixed + +* https://github.com/CiscoDevNet/ansible-dcnm/issues/151 +* https://github.com/CiscoDevNet/ansible-dcnm/issues/143 +* https://github.com/CiscoDevNet/ansible-dcnm/issues/141 +* https://github.com/CiscoDevNet/ansible-dcnm/issues/139 +* https://github.com/CiscoDevNet/ansible-dcnm/issues/137 +* https://github.com/CiscoDevNet/ansible-dcnm/issues/134 +* https://github.com/CiscoDevNet/ansible-dcnm/issues/112 +* Fixed Restapi used in version detection mechanism in module utils. +* Fixed Restapi used in various modules to support the latest api's. +* Fixed deploy knob behavior for vrf and network module to align with GUI functionality. +* Fixed idempotence issue in interface module +* Fixed diff generation issue for network deletion with NDFC + +### Deprecated + +* Deploy knob for individual attachments in vrf and network modules has been marked for deprecation. + ## [2.0.1] - 2022-01-28 Fixed httpapi plugin issue preventing connections to latest version of NDFC (Version: `12.0.2f`) From 96c2fcd314d667af1306de08407aa32a4f9db692 Mon Sep 17 00:00:00 2001 From: praveenramoorthy <62758226+praveenramoorthy@users.noreply.github.com> Date: Tue, 19 Jul 2022 19:40:54 +0530 Subject: [PATCH 14/17] Update CHANGELOG.md --- CHANGELOG.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c55af7828..39413415d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,7 +9,7 @@ This project adheres to [Semantic Versioning](http://semver.org/). 
### Added * The following new modules are included in this release - * `dcnm_resource_manager` - Module for managing dcnm resources + * `dcnm_resource_manager` - Module for managing dcnm resources. [Reference Info](https://www.cisco.com/c/en/us/td/docs/dcn/ndfc/121x/configuration/fabric-controller/cisco-ndfc-fabric-controller-configuration-guide-121x/lan-fabrics.html#task_fsg_sn4_zqb) ### Fixed @@ -24,8 +24,8 @@ This project adheres to [Semantic Versioning](http://semver.org/). * Fixed Restapi used in version detection mechanism in module utils. * Fixed Restapi used in various modules to support the latest api's. * Fixed deploy knob behavior for vrf and network module to align with GUI functionality. -* Fixed idempotence issue in interface module -* Fixed diff generation issue for network deletion with NDFC +* Fixed idempotence issue in interface module. +* Fixed diff generation issue for network deletion with NDFC. ### Deprecated @@ -170,6 +170,7 @@ The Ansible Cisco Data Center Network Manager (DCNM) collection includes modules * cisco.dcnm.dcnm_network - Add and remove Networks from a DCNM managed VXLAN fabric. * cisco.dcnm.dcnm_interface - DCNM Ansible Module for managing interfaces. 
+[2.1.0]: https://github.com/CiscoDevNet/ansible-dcnm/compare/2.0.1...2.1.0 [2.0.1]: https://github.com/CiscoDevNet/ansible-dcnm/compare/2.0.0...2.0.1 [2.0.0]: https://github.com/CiscoDevNet/ansible-dcnm/compare/1.2.4...2.0.0 [1.2.4]: https://github.com/CiscoDevNet/ansible-dcnm/compare/1.2.3...1.2.4 From 4b0cfbc0794f8155c6a7398834667f3fb5959012 Mon Sep 17 00:00:00 2001 From: praveenramoorthy <62758226+praveenramoorthy@users.noreply.github.com> Date: Tue, 19 Jul 2022 19:41:47 +0530 Subject: [PATCH 15/17] Update CHANGELOG.md --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 39413415d..691e00a2c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,7 @@ This project adheres to [Semantic Versioning](http://semver.org/). ## Unreleased -## [2.1.0] - 2022-06-19 +## [2.1.0] - 2022-07-19 ### Added From 07d7afc5d623c7c8eabfbd3c5613b24d68a1a22f Mon Sep 17 00:00:00 2001 From: praveenramoorthy <62758226+praveenramoorthy@users.noreply.github.com> Date: Tue, 19 Jul 2022 19:42:33 +0530 Subject: [PATCH 16/17] Update galaxy.yml --- galaxy.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/galaxy.yml b/galaxy.yml index 23e95d093..3af865848 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -1,7 +1,7 @@ --- namespace: cisco name: dcnm -version: 2.0.1 +version: 2.1.0 readme: README.md authors: - Shrishail Kariyappanavar From 82b998a875fb98896ec62e89f71ef59f2b1afb6f Mon Sep 17 00:00:00 2001 From: praveenramoorthy <62758226+praveenramoorthy@users.noreply.github.com> Date: Tue, 19 Jul 2022 19:52:49 +0530 Subject: [PATCH 17/17] Update CHANGELOG.md --- CHANGELOG.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 691e00a2c..a7e69cc05 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,8 +2,6 @@ All notable changes to this project will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org/). -## Unreleased - ## [2.1.0] - 2022-07-19 ### Added