|
| 1 | +import json |
| 2 | +import logging |
| 3 | +import os |
| 4 | +import time |
| 5 | + |
| 6 | +import pytest |
| 7 | +from jsondiff import diff |
| 8 | +from prettytable import PrettyTable |
| 9 | + |
| 10 | +from lib.api import Api |
| 11 | + |
# Target tenant API endpoint and access token.
# NOTE(review): both are blank here — presumably filled in locally before the
# tests are run; confirm how credentials are supplied.
API_URL = ""
API_TOKEN = ""
# Worker count passed to every Api() instance in this module.
WORKERS = 10
# Site name used by the query-by-site test, and the path where that test
# writes the retrieved JSON.
USER_SPECIFIED_SITE = "smeshsiteongoing-singlenode"
TEST_DATA_SITE_FILE_NAME = "../json/smeshsiteongoing-singlenode-2.json"

# Names of the two sites used by the compare test (old vs. new side).
TEST_DATA_SITE_OLD_NAME = "smeshsiteongoing-singlenode"
TEST_DATA_SITE_NEW_NAME = "ongoing-aws-crt-multi-sm2"

# The below 2 vars are used in case of compare functionality tc.
# Run the query manually and store json inside the file_names as below. They
# are manually created by the user at the time of verification of the tc.
TEST_DATA_SITE_OLD_FILE_NAME_2 = "../json/smeshsiteongoing-singlenode.json"  # read file
TEST_DATA_SITE_NEW_FILE_NAME_2 = "../json/ongoing-aws-crt-multi-sm2.json"  # read file

# Namespace used by the query-by-namespace test, and its JSON output path.
GET_NAMESPACE = "ongoing-tests"
GET_NAMESPACE_FILE = "../json/all_ns-2.json"

# The below 2 vars are used inside the inventory tc:
# CSV output path written by the inventory test, and the stored site JSON it reads.
INVENTORY_FILE_CSV = "../csv/site-inventory-2.csv"
TEST_DATA_SITE_OLD_FILE_NAME_REPLICA = "../json/smeshsiteongoing-singlenode.json"  # read file

# CSV path where the compare test writes the diff table.
DIFF_FILE_CSV = "../csv/diff-file-2.csv"

# Configure the logging: module-level logger shared by all tests in this file.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
| 39 | + |
# This is a test to verify the query functionality using the given site name.
# It validates the functionality of cmd: ./get-sites.py -a <api> -t <token> -f <file_pathname_where_json_is_stored> -s <site_name> -q --log-stdout
def test_api_site_query():
    """Query a single site via Api, persist the JSON, and validate it.

    Asserts that the output file is created, parses as JSON, and is a
    non-empty dict or list. The output file is removed even when an
    assertion fails, so reruns start from a clean state.
    """
    start_time = time.perf_counter()
    q = Api(logger=logger, api_url=API_URL, api_token=API_TOKEN, namespace=None, site=USER_SPECIFIED_SITE,
            workers=WORKERS)
    q.run()
    q.write_json_file(TEST_DATA_SITE_FILE_NAME)
    elapsed_time = time.perf_counter() - start_time
    logger.info(f'Query time: {int(elapsed_time)} seconds with {WORKERS} workers')

    # Ensure file was created
    assert os.path.exists(TEST_DATA_SITE_FILE_NAME), f"{TEST_DATA_SITE_FILE_NAME} was not created."

    try:
        # Validate file contents
        with open(TEST_DATA_SITE_FILE_NAME, "r") as f:
            data = json.load(f)

        # Check on structure as well as that the data is not empty
        assert isinstance(data, (dict, list)), "Output JSON is not a dict or list."
        assert len(data) > 0, "Output file is empty."
    finally:
        # Clean up the file where the retrieved json is stored; the finally
        # block guarantees removal even when an assertion above fails.
        os.remove(TEST_DATA_SITE_FILE_NAME)
| 65 | + |
| 66 | + |
# This test verifies query functionality using the given namespace name.
# It validates functionality of cmd: ./get-sites.py -a <api> -t <token> -f <file_pathname_where_json_is_stored> -n <namespace_name> -q --log-stdout
def test_api_ns_query():
    """Query all sites in a namespace, persist the JSON, and validate it.

    Asserts that the output file exists, parses as JSON, and is a non-empty
    dict or list; the file is removed afterwards even on assertion failure.
    """
    start_time = time.perf_counter()
    q = Api(logger=logger, api_url=API_URL, api_token=API_TOKEN, namespace=GET_NAMESPACE, site=None, workers=WORKERS)
    q.run()
    q.write_json_file(GET_NAMESPACE_FILE)
    elapsed_time = time.perf_counter() - start_time
    logger.info(f'Query time: {int(elapsed_time)} seconds with {WORKERS} workers')

    # Ensure file was created
    assert os.path.exists(GET_NAMESPACE_FILE), f"file {GET_NAMESPACE_FILE} was not created."

    try:
        # Validate file contents
        with open(GET_NAMESPACE_FILE, "r") as f:
            data = json.load(f)

        # Check on structure as well as that the data is not empty
        assert isinstance(data, (dict, list)), "Output JSON is not a dict or list."
        assert len(data) > 0, "Output file is empty."
    finally:
        # Clean up the file where the retrieved json is stored; finally
        # guarantees removal even when an assertion above fails.
        os.remove(GET_NAMESPACE_FILE)
| 91 | + |
| 92 | + |
# This test verifies the inventory functionality using the json_file and its path of a particular site.
# It validates the cmd: ./get-sites.py -a <api> -t <token> -f <filepath_where_json_of_required_site_is_present> --build-inventory --inventory-table --inventory-file-csv <filepath_where_csv_of_obtained_inventory_data_is_to_be_stored> --log-stdout
def test_api_inventory_function(caplog):
    """Build the inventory table from a stored site JSON and validate output.

    Checks that expected words appear both in the CSV written to disk and in
    the pretty table logged to stdout; the CSV is cleaned up even when an
    assertion fails.
    """
    caplog.set_level(logging.INFO)  # Capture INFO logs

    q = Api(logger=logger, api_url=API_URL, api_token=API_TOKEN, namespace=None, site=None, workers=WORKERS)

    # Retrieve inventory data and assert it is present. The assert makes a
    # separate `if data:` guard redundant, so the write/log happen directly.
    data = q.build_inventory(json_file=TEST_DATA_SITE_OLD_FILE_NAME_REPLICA)
    assert data is not None, "Inventory data is None"
    q.write_string_file(INVENTORY_FILE_CSV, data.get_csv_string())
    logger.info(f"\n\n{data.get_formatted_string('text')}\n")

    # Ensure file was created
    assert os.path.exists(INVENTORY_FILE_CSV), f"file {INVENTORY_FILE_CSV} was not created."

    try:
        # Validate file contents
        with open(INVENTORY_FILE_CSV, "r") as f:
            csv_content = f.read()

        # Words expected in both the csv_file and the inventory table logged
        # to stdout. (Local variable, so snake_case rather than CONSTANT_CASE.)
        expected_words = ["kind", "spec", "node"]

        for word in expected_words:
            assert word in csv_content, f"'{word}' not found in CSV content"

        # Assert pretty table (log) contains expected words
        log_output = caplog.text
        for word in expected_words:
            assert word in log_output, f"'{word}' not found in log output (pretty table)"
    finally:
        # Clean up the inventory CSV even when an assertion above fails.
        os.remove(INVENTORY_FILE_CSV)
| 127 | + |
| 128 | + |
# This test verifies the comparison functionality using the json_files of the sites to be compared.
# It validates the cmd: ./get-sites.py -a <api> -t <token> -c --old-site <site1_name> --old-site-file <filepath_where_json_of_site1_is_present> --new-site <site2_name> --new-site-file <filepath_where_json_of_site2_is_present> --diff-table --diff-file-csv <filepath_where_compared_diff_is_to_be_written> --log-stdout
def test_api_compare_function(caplog):
    """Compare two stored site JSON documents and validate the diff table.

    Both sites being compared should be of the same kind or an smv-smv2 duo
    (eg: aws_vpc_site cannot be compared with securemesh_site, while
    securemesh_site can be compared with securemesh_site_v2).
    """
    caplog.set_level(logging.INFO)  # Capture INFO logs

    q = Api(logger=logger, api_url=API_URL, api_token=API_TOKEN, namespace=None, site=None, workers=WORKERS)

    # Retrieve the comparison result and verify its type/columns BEFORE
    # calling PrettyTable methods on it (the original called get_csv_string
    # first, so a wrong type would surface as AttributeError, not a clear
    # assertion failure). The assert also makes the old `if data:` redundant.
    data = q.compare(old_site=TEST_DATA_SITE_OLD_NAME, old_file=TEST_DATA_SITE_OLD_FILE_NAME_2,
                     new_site=TEST_DATA_SITE_NEW_NAME, new_file=TEST_DATA_SITE_NEW_FILE_NAME_2)
    assert data is not None, "Comparison data is None"
    assert isinstance(data, PrettyTable), "not pretty table"
    assert data.field_names == ["path", "values"], "expected field-names not present"

    # Write the diff to the csv_file and log the formatted table to stdout.
    q.write_string_file(DIFF_FILE_CSV, data.get_csv_string())
    logger.info(f"\n\n{data.get_formatted_string('text')}\n")

    # Ensure diff csv_file was created
    assert os.path.exists(DIFF_FILE_CSV), f"file {DIFF_FILE_CSV} was not created."

    try:
        # Read csv_file contents
        with open(DIFF_FILE_CSV, "r") as f:
            csv_content = f.read()

        # Since the names of the compared sites are mostly different, the
        # metadata/name key will mostly be present in the comparison output.
        expected_words = ["metadata/name"]

        for word in expected_words:
            assert word in csv_content, f"'{word}' not found in CSV content"

        # Assert pretty table (log) contains expected words
        log_output = caplog.text
        for word in expected_words:
            assert word in log_output, f"'{word}' not found in log output (pretty table)"
    finally:
        # Clean up the csv_file where diff data is stored, even on failure.
        os.remove(DIFF_FILE_CSV)
| 170 | + |
| 171 | + |
# Fixture: load and return the stored query JSON of site1 (the "old" site).
@pytest.fixture
def data_old():
    api_client = Api(logger=logger, api_url=API_URL, api_token=API_TOKEN,
                     namespace=None, site=None, workers=WORKERS)
    return api_client.read_json_file(TEST_DATA_SITE_OLD_FILE_NAME_2)
| 177 | + |
| 178 | + |
# Fixture: name of site1 (the "old" side of the comparison tests).
@pytest.fixture
def old_site():
    return TEST_DATA_SITE_OLD_NAME
| 183 | + |
| 184 | + |
# Fixture: '/'-separated key paths used to pull values out of the site json
# query file. NOTE(review): segments like '0' appear to index into lists and
# 'None' appears to be a literal path segment — confirm against Api._get_by_path.
@pytest.fixture
def dict_keys():
    return ['kind', 'metadata/name', 'spec/vip_vrrp_mode', 'spec/site_to_site_ipsec_connectivity',
            'spec/main_nodes/0/name', 'spec/main_nodes/0/slo_address', 'spec/proactive_monitoring',
            'nodes/node0/hw_info/memory/speed', 'nodes/node0/interfaces', 'namespaces/jeevan-ns/loadbalancer/http',
            'namespaces/default/origin_pools', 'bgp', 'None/worker_node_count']
| 192 | + |
| 193 | + |
# Test to verify that the parameterized values below match the ones present in
# the site json file (obtained using query by site), using dict_keys as the
# lookup paths for each expected entry (position i of expected pairs with
# position i of dict_keys).
@pytest.mark.parametrize("expected", [
    [
        ['securemesh_site'],
        ['smeshsiteongoing-singlenode'],
        ['VIP_VRRP_INVALID'],
        [[{'destination': ['10.144.11.158'], 'port': 0}]],
        ['master-0'],
        ['10.144.11.158'],
        [],
        [],
        ['eth1', 'eth2'],
        ['jeevan', 'jeevan-1', 'port-range'],
        ['gnu-on-ce'],
        ['ves-io-bgp-ves-io-securemesh-site-smeshsiteongoing-singlenode'],
        [],
    ]
])
def test_verify_get_by_path(dict_keys, expected, old_site, data_old):
    """Verify Api._get_by_path resolves each '/'-separated key path within the
    stored site JSON to the corresponding expected value."""
    q = Api(logger=logger, api_url=API_URL, api_token=API_TOKEN, namespace=None, site=None, workers=WORKERS)

    # Guard against the fixture and the parametrized data drifting out of
    # sync; without this, zip below would silently truncate the longer side.
    assert len(dict_keys) == len(expected), "dict_keys/expected length mismatch"

    for key_path, want in zip(dict_keys, expected):
        response = []  # fresh accumulator handed to _get_by_path per key
        result = q._get_by_path(data_old['site'][old_site], key_path.split("/"), response)
        assert result == want
| 219 | + |
| 220 | + |
# Fixture: the full ordered list of key paths expected from Api._get_keys for
# the comparison of the two stored sites; test_verify_get_keys checks the
# computed key list against this, in order.
@pytest.fixture
def dict_keys_new():
    return ['kind', 'main_node_count', 'metadata/name', 'metadata/labels/host-os-version', 'metadata/labels/hw-model',
            'metadata/labels/hw-vendor', 'metadata/labels/hw-version', 'metadata/labels/test-feature',
            'metadata/labels/ves.io/provider', 'metadata/labels/chaos-test', 'metadata/labels/public-ip',
            'metadata/labels/site-mesh-group-type', 'metadata/labels/smv2-smg-fullmesh-public-auto',
            'metadata/labels/test-feature-automation', 'metadata/labels/ver-type', 'metadata/labels/waap-on-smsv2',
            'metadata/labels/iperf-test', 'metadata/labels/ms-smg', 'metadata/labels/name', 'metadata/labels/purpose',
            'metadata/labels/single-ver', 'metadata/labels/smeshongoing', 'metadata/description', 'spec/address',
            'spec/volterra_software_version', 'spec/connected_re/0/uid', 'spec/connected_re/0/name',
            'spec/connected_re/1/uid', 'spec/connected_re/1/name', 'spec/connected_re_for_config/0/uid',
            'spec/connected_re_for_config/0/name', 'spec/vip_vrrp_mode', 'spec/tunnel_type',
            'spec/operating_system_version', 'spec/region', 'spec/site_to_site_ipsec_connectivity',
            'spec/main_nodes/2/name', 'spec/main_nodes/2/slo_address', 'spec/admin_user_credentials',
            'spec/proactive_monitoring', 'nodes/node0/hostname', 'nodes/node0/hw_info/os/name',
            'nodes/node0/hw_info/os/version', 'nodes/node0/hw_info/os/release', 'nodes/node0/hw_info/product/name',
            'nodes/node0/hw_info/product/vendor', 'nodes/node0/hw_info/product/version',
            'nodes/node0/hw_info/board/vendor', 'nodes/node0/hw_info/chassis/vendor',
            'nodes/node0/hw_info/chassis/version', 'nodes/node0/hw_info/bios/vendor',
            'nodes/node0/hw_info/bios/version', 'nodes/node0/hw_info/bios/date', 'nodes/node0/hw_info/cpu/model',
            'nodes/node0/hw_info/cpu/speed', 'nodes/node0/hw_info/cpu/cache', 'nodes/node0/hw_info/cpu/flags',
            'nodes/node0/hw_info/memory/type', 'nodes/node0/hw_info/memory/speed', 'nodes/node0/hw_info/memory/size_mb',
            'nodes/node0/hw_info/storage/0/name', 'nodes/node0/hw_info/storage/0/driver',
            'nodes/node0/hw_info/storage/0/model', 'nodes/node0/hw_info/storage/0/size_gb',
            'nodes/node0/hw_info/network', 'nodes/node0/hw_info/kernel/release', 'nodes/node0/hw_info/kernel/version',
            'nodes/node0/hw_info/usb', 'nodes/node0/interfaces', 'nodes/node1/hostname', 'nodes/node1/hw_info/os/name',
            'nodes/node1/hw_info/os/vendor', 'nodes/node1/hw_info/os/version', 'nodes/node1/hw_info/os/release',
            'nodes/node1/hw_info/os/architecture', 'nodes/node1/hw_info/product/name',
            'nodes/node1/hw_info/product/vendor', 'nodes/node1/hw_info/product/version',
            'nodes/node1/hw_info/board/name', 'nodes/node1/hw_info/board/vendor', 'nodes/node1/hw_info/board/version',
            'nodes/node1/hw_info/chassis/type', 'nodes/node1/hw_info/chassis/vendor',
            'nodes/node1/hw_info/chassis/version', 'nodes/node1/hw_info/bios/vendor',
            'nodes/node1/hw_info/bios/version', 'nodes/node1/hw_info/bios/date', 'nodes/node1/hw_info/cpu/vendor',
            'nodes/node1/hw_info/cpu/model', 'nodes/node1/hw_info/cpu/speed', 'nodes/node1/hw_info/cpu/cache',
            'nodes/node1/hw_info/cpu/cpus', 'nodes/node1/hw_info/cpu/cores', 'nodes/node1/hw_info/cpu/threads',
            'nodes/node1/hw_info/cpu/flags', 'nodes/node1/hw_info/memory/type', 'nodes/node1/hw_info/memory/speed',
            'nodes/node1/hw_info/memory/size_mb', 'nodes/node1/hw_info/storage', 'nodes/node1/hw_info/network',
            'nodes/node1/hw_info/kernel/release', 'nodes/node1/hw_info/kernel/version',
            'nodes/node1/hw_info/kernel/architecture', 'nodes/node1/hw_info/usb', 'nodes/node1/hw_info/numa_nodes',
            'nodes/node2/hostname', 'nodes/node2/hw_info/os/name', 'nodes/node2/hw_info/os/vendor',
            'nodes/node2/hw_info/os/version', 'nodes/node2/hw_info/os/release', 'nodes/node2/hw_info/os/architecture',
            'nodes/node2/hw_info/product/name', 'nodes/node2/hw_info/product/vendor',
            'nodes/node2/hw_info/product/version', 'nodes/node2/hw_info/board/name', 'nodes/node2/hw_info/board/vendor',
            'nodes/node2/hw_info/board/version', 'nodes/node2/hw_info/chassis/type',
            'nodes/node2/hw_info/chassis/vendor', 'nodes/node2/hw_info/chassis/version',
            'nodes/node2/hw_info/bios/vendor', 'nodes/node2/hw_info/bios/version', 'nodes/node2/hw_info/bios/date',
            'nodes/node2/hw_info/cpu/vendor', 'nodes/node2/hw_info/cpu/model', 'nodes/node2/hw_info/cpu/speed',
            'nodes/node2/hw_info/cpu/cache', 'nodes/node2/hw_info/cpu/cpus', 'nodes/node2/hw_info/cpu/cores',
            'nodes/node2/hw_info/cpu/threads', 'nodes/node2/hw_info/cpu/flags', 'nodes/node2/hw_info/memory/type',
            'nodes/node2/hw_info/memory/speed', 'nodes/node2/hw_info/memory/size_mb', 'nodes/node2/hw_info/storage',
            'nodes/node2/hw_info/network', 'nodes/node2/hw_info/kernel/release', 'nodes/node2/hw_info/kernel/version',
            'nodes/node2/hw_info/kernel/architecture', 'nodes/node2/hw_info/usb', 'nodes/node2/hw_info/numa_nodes',
            'namespaces', 'bgp', 'None/worker_node_count']
| 275 | + |
| 276 | + |
# Fixture: load and return the stored query JSON of site2 (the "new" site).
@pytest.fixture
def data_new():
    api_client = Api(logger=logger, api_url=API_URL, api_token=API_TOKEN,
                     namespace=None, site=None, workers=WORKERS)
    return api_client.read_json_file(TEST_DATA_SITE_NEW_FILE_NAME_2)
| 282 | + |
| 283 | + |
# Fixture: name of site2 (the "new" side of the comparison tests).
@pytest.fixture
def new_site():
    return TEST_DATA_SITE_NEW_NAME
| 288 | + |
| 289 | + |
# Tc to verify that the key paths produced for the comparison table match the
# expected list, i.e. the right set of keys is retrieved during comparison.
def test_verify_get_keys(dict_keys_new, old_site, data_old, new_site, data_new):
    api_client = Api(logger=logger, api_url=API_URL, api_token=API_TOKEN, namespace=None, site=None, workers=WORKERS)

    # Compute the compact diff of the two sites' JSON documents; the expected
    # key list in dict_keys_new was derived from this same diff.
    delta = diff(data_old['site'][old_site], data_new['site'][new_site], syntax="compact")

    collected = []
    actual_keys = api_client._get_keys(None, delta, collected, old_site, data_old)
    assert actual_keys == dict_keys_new
0 commit comments