diff --git a/dump/__init__.py b/dump/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/dump/helper.py b/dump/helper.py new file mode 100644 index 0000000000..806f2d0e26 --- /dev/null +++ b/dump/helper.py @@ -0,0 +1,35 @@ +import os, sys + +def create_template_dict(dbs): + """ Generate a Template which will be returned by Executor Classes """ + return {db: {'keys': [], 'tables_not_found': []} for db in dbs} + +def verbose_print(str): + if "VERBOSE" in os.environ and os.environ["VERBOSE"] == "1": + print(str) + +def handle_error(err_str, excep=False): + """ + Handles general error conditions, if any experienced by the module, + Set excep = True, to raise a exception + """ + if excep: + raise Exception("ERROR : {}".format(err_str)) + else: + print("ERROR : {}".format(err_str), file = sys.stderr) + + +def handle_multiple_keys_matched_error(err_str, key_to_go_with="", excep=False): + if excep: + handle_error(err_str, True) + else: + print("ERROR (AMBIGUITY): {} \n Proceeding with the key {}".format(err_str, key_to_go_with), file = sys.stderr) + + +def sort_lists(ret_template): + """ Used to sort the nested list returned by the template dict. 
""" + for db in ret_template.keys(): + for key in ret_template[db].keys(): + if isinstance(ret_template[db][key], list): + ret_template[db][key].sort() + return ret_template diff --git a/dump/match_infra.py b/dump/match_infra.py new file mode 100644 index 0000000000..fe0d8126a9 --- /dev/null +++ b/dump/match_infra.py @@ -0,0 +1,300 @@ +import json, fnmatch +from abc import ABC, abstractmethod +from dump.helper import verbose_print +from swsscommon.swsscommon import SonicV2Connector, SonicDBConfig +from sonic_py_common import multi_asic +from utilities_common.constants import DEFAULT_NAMESPACE + +EXCEP_DICT = { + "INV_REQ": "Argument should be of type MatchRequest", + "INV_DB": "DB provided is not valid", + "NO_MATCHES": "No Entries found for Table|key_pattern provided", + "NO_SRC": "Either one of db or file in the request should be non-empty", + "NO_TABLE": "No 'table' name provided", + "NO_KEY": "'key_pattern' cannot be empty", + "NO_VALUE" : "Field is provided, but no value is provided to compare with", + "SRC_VAGUE": "Only one of db or file should be provided", + "CONN_ERR" : "Connection Error", + "JUST_KEYS_COMPAT": "When Just_keys is set to False, return_fields should be empty", + "BAD_FORMAT_RE_FIELDS": "Return Fields should be of list type", + "NO_ENTRIES": "No Keys found after applying the filtering criteria", + "FILE_R_EXEP": "Exception Caught While Reading the json cfg file provided", + "INV_NS": "Namespace is invalid" +} + +class MatchRequest: + """ + Request Object which should be passed to the MatchEngine + + Attributes: + "table" : A Valid Table Name + "key_pattern" : Pattern of the redis-key to match. Defaults to "*". Eg: "*" will match all the keys. + Supports these glob style patterns. 
https://redis.io/commands/KEYS + "field" : Field to check for a match, Defaults to None + "value" : Value to match, Defaults to None + "return_fields" : An iterable type, where each element would imply a field to return from all the filtered keys + "db" : A Valid DB name, Defaults to "". + "file" : A Valid Config JSON file, Eg: copp_cfg.json, Defaults to "". + Only one of the db/file fields should have a non-empty string. + "just_keys" : If true, Only Returns the keys matched. Does not return field-value pairs. Defaults to True + "ns" : namespace argument, if nothing is provided, default namespace is used + "match_entire_list" : When this arg is set to true, entire list is matched including the ",". + When False, the values are split based on "," and individual items are matched with the value. + """ + def __init__(self, **kwargs): + self.table = kwargs["table"] if "table" in kwargs else None + self.key_pattern = kwargs["key_pattern"] if "key_pattern" in kwargs else "*" + self.field = kwargs["field"] if "field" in kwargs else None + self.value = kwargs["value"] if "value" in kwargs else None + self.return_fields = kwargs["return_fields"] if "return_fields" in kwargs else [] + self.db = kwargs["db"] if "db" in kwargs else "" + self.file = kwargs["file"] if "file" in kwargs else "" + self.just_keys = kwargs["just_keys"] if "just_keys" in kwargs else True + self.ns = kwargs["ns"] if "ns" in kwargs else "" + self.match_entire_list = kwargs["match_entire_list"] if "match_entire_list" in kwargs else False + err = self.__static_checks() + verbose_print(str(err)) + if err: + raise Exception("Static Checks for the MatchRequest Failed, Reason: \n" + err) + + + def __static_checks(self): + + if not self.db and not self.file: + return EXCEP_DICT["NO_SRC"] + + if self.db and self.file: + return EXCEP_DICT["SRC_VAGUE"] + + if not self.db: + try: + with open(self.file) as f: + json.load(f) + except Exception as e: + return EXCEP_DICT["FILE_R_EXEP"] + str(e) + + if not self.file and self.db
not in SonicDBConfig.getDbList(): + return EXCEP_DICT["INV_DB"] + + if not self.table: + return EXCEP_DICT["NO_TABLE"] + + if not isinstance(self.return_fields, list): + return EXCEP_DICT["BAD_FORMAT_RE_FIELDS"] + + if not self.just_keys and self.return_fields: + return EXCEP_DICT["JUST_KEYS_COMPAT"] + + if self.field and not self.value: + return EXCEP_DICT["NO_VALUE"] + + if self.ns != DEFAULT_NAMESPACE and self.ns not in multi_asic.get_namespace_list(): + return EXCEP_DICT["INV_NS"] + " Choose From {}".format(multi_asic.get_namespace_list()) + + verbose_print("MatchRequest Checks Passed") + + return "" + + def __str__(self): + str = "----------------------- \n MatchRequest: \n" + if self.db: + str += "db:{} , ".format(self.db) + if self.file: + str += "file:{} , ".format(self.file) + if self.table: + str += "table:{} , ".format(self.table) + if self.key_pattern: + str += "key_pattern:{} , ".format(self.key_pattern) + if self.field: + str += "field:{} , ".format(self.field) + if self.value: + str += "value:{} , ".format(self.value) + if self.just_keys: + str += "just_keys:True ," + else: + str += "just_keys:False ," + if len(self.return_fields) > 0: + str += "return_fields: " + ",".join(self.return_fields) + " " + if self.ns: + str += "namespace:{} , ".format(self.ns) + if self.match_entire_list: + str += "match_list: True , " + else: + str += "match_list: False , " + return str + +class SourceAdapter(ABC): + """ Source Adaptor offers unified interface to Data Sources """ + + def __init__(self): + pass + + @abstractmethod + def connect(self, db, ns): + """ Return True for Success, False for failure """ + return False + + @abstractmethod + def getKeys(self, db, table, key_pattern): + return [] + + @abstractmethod + def get(self, db, key): + return {} + + @abstractmethod + def hget(self, db, key, field): + return "" + + @abstractmethod + def get_separator(self, db): + return "" + +class RedisSource(SourceAdapter): + """ Concrete Adaptor Class for connecting to Redis Data
Sources """ + + def __init__(self): + self.conn = None + + def connect(self, db, ns): + try: + if not SonicDBConfig.isInit(): + if multi_asic.is_multi_asic(): + SonicDBConfig.load_sonic_global_db_config() + else: + SonicDBConfig.load_sonic_db_config() + self.conn = SonicV2Connector(namespace=ns, use_unix_socket_path=True) + self.conn.connect(db) + except Exception as e: + verbose_print("RedisSource: Connection Failed\n" + str(e)) + return False + return True + + def get_separator(self, db): + return self.conn.get_db_separator(db) + + def getKeys(self, db, table, key_pattern): + return self.conn.keys(db, table + self.get_separator(db) + key_pattern) + + def get(self, db, key): + return self.conn.get_all(db, key) + + def hget(self, db, key, field): + return self.conn.get(db, key, field) + +class JsonSource(SourceAdapter): + """ Concrete Adaptor Class for connecting to JSON Data Sources """ + + def __init__(self): + self.json_data = None + + def connect(self, db, ns): + try: + with open(db) as f: + self.json_data = json.load(f) + except Exception as e: + verbose_print("JsonSource: Loading the JSON file failed" + str(e)) + return False + return True + + def get_separator(self, db): + return SonicDBConfig.getSeparator("CONFIG_DB") + + def getKeys(self, db, table, key_pattern): + if table not in self.json_data: + return [] + # https://docs.python.org/3.7/library/fnmatch.html + kp = key_pattern.replace("[^", "[!") + kys = fnmatch.filter(self.json_data[table].keys(), kp) + return [table + self.get_separator(db) + ky for ky in kys] + + def get(self, db, key): + sep = self.get_separator(db) + table, key = key.split(sep, 1) + return self.json_data.get(table, {}).get(key, {}) + + def hget(self, db, key, field): + sep = self.get_separator(db) + table, key = key.split(sep, 1) + return self.json_data.get(table, {}).get(key, {}).get(field, "") + +class MatchEngine: + """ Pass in a MatchRequest, to fetch the Matched dump from the Data sources """ + + def __get_source_adapter(self,
req): + src = None + d_src = "" + if req.db: + d_src = req.db + src = RedisSource() + else: + d_src = req.file + src = JsonSource() + return d_src, src + + def __create_template(self): + return {"error" : "", "keys" : [], "return_values" : {}} + + def __display_error(self, err): + template = self.__create_template() + template['error'] = err + verbose_print("MatchEngine: \n" + template['error']) + return template + + def __filter_out_keys(self, src, req, all_matched_keys): + # TODO: Custom Callbacks for Complex Matching Criteria + if not req.field: + return all_matched_keys + + filtered_keys = [] + for key in all_matched_keys: + f_values = src.hget(req.db, key, req.field) + if "," in f_values and not req.match_entire_list: + f_value = f_values.split(",") + else: + f_value = [f_values] + if req.value in f_value: + filtered_keys.append(key) + return filtered_keys + + def __fill_template(self, src, req, filtered_keys, template): + for key in filtered_keys: + temp = {} + if not req.just_keys: + temp[key] = src.get(req.db, key) + template["keys"].append(temp) + elif len(req.return_fields) > 0: + template["keys"].append(key) + template["return_values"][key] = {} + for field in req.return_fields: + template["return_values"][key][field] = src.hget(req.db, key, field) + else: + template["keys"].append(key) + verbose_print("Return Values:" + str(template["return_values"])) + return template + + def fetch(self, req): + """ Given a request obj, find its match in the data source provided """ + if not isinstance(req, MatchRequest): + return self.__display_error(EXCEP_DICT["INV_REQ"]) + + verbose_print(str(req)) + + if not req.key_pattern: + return self.__display_error(EXCEP_DICT["NO_KEY"]) + + d_src, src = self.__get_source_adapter(req) + if not src.connect(d_src, req.ns): + return self.__display_error(EXCEP_DICT["CONN_ERR"]) + + template = self.__create_template() + all_matched_keys = src.getKeys(req.db, req.table, req.key_pattern) + if not all_matched_keys: + return 
self.__display_error(EXCEP_DICT["NO_MATCHES"]) + + filtered_keys = self.__filter_out_keys(src, req, all_matched_keys) + verbose_print("Filtered Keys:" + str(filtered_keys)) + if not filtered_keys: + return self.__display_error(EXCEP_DICT["NO_ENTRIES"]) + return self.__fill_template(src, req, filtered_keys, template) + diff --git a/tests/dump_input/copp_cfg.json b/tests/dump_input/copp_cfg.json new file mode 100644 index 0000000000..cae5f4e8e0 --- /dev/null +++ b/tests/dump_input/copp_cfg.json @@ -0,0 +1,103 @@ +{ + "COPP_GROUP": { + "default": { + "queue": "0", + "meter_type":"packets", + "mode":"sr_tcm", + "cir":"600", + "cbs":"600", + "red_action":"drop" + }, + "queue4_group1": { + "trap_action":"trap", + "trap_priority":"4", + "queue": "4" + }, + "queue4_group2": { + "trap_action":"copy", + "trap_priority":"4", + "queue": "4", + "meter_type":"packets", + "mode":"sr_tcm", + "cir":"600", + "cbs":"600", + "red_action":"drop" + }, + "queue4_group3": { + "trap_action":"trap", + "trap_priority":"4", + "queue": "4" + }, + "queue1_group1": { + "trap_action":"trap", + "trap_priority":"1", + "queue": "1", + "meter_type":"packets", + "mode":"sr_tcm", + "cir":"6000", + "cbs":"6000", + "red_action":"drop" + }, + "queue1_group2": { + "trap_action":"trap", + "trap_priority":"1", + "queue": "1", + "meter_type":"packets", + "mode":"sr_tcm", + "cir":"600", + "cbs":"600", + "red_action":"drop" + }, + "queue2_group1": { + "cbs": "1000", + "cir": "1000", + "genetlink_mcgrp_name": "packets", + "genetlink_name": "psample", + "meter_type": "packets", + "mode": "sr_tcm", + "queue": "2", + "red_action": "drop", + "trap_action": "trap", + "trap_priority": "1" + + } + }, + "COPP_TRAP": { + "bgp": { + "trap_ids": "bgp,bgpv6", + "trap_group": "queue4_group1" + }, + "lacp": { + "trap_ids": "lacp", + "trap_group": "queue4_group1" + }, + "arp": { + "trap_ids": "arp_req,arp_resp,neigh_discovery", + "trap_group": "queue4_group2" + }, + "lldp": { + "trap_ids": "lldp", + "trap_group": 
"queue4_group3" + }, + "dhcp": { + "trap_ids": "dhcp,dhcpv6", + "trap_group": "queue4_group3" + }, + "udld": { + "trap_ids": "udld", + "trap_group": "queue4_group3" + }, + "ip2me": { + "trap_ids": "ip2me", + "trap_group": "queue1_group1" + }, + "nat": { + "trap_ids": "src_nat_miss,dest_nat_miss", + "trap_group": "queue1_group2" + }, + "sflow": { + "trap_group": "queue2_group1", + "trap_ids": "sample_packet" + } + } +} diff --git a/tests/dump_tests/__init__.py b/tests/dump_tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/dump_tests/match_engine_test.py b/tests/dump_tests/match_engine_test.py new file mode 100644 index 0000000000..a4d4330b9b --- /dev/null +++ b/tests/dump_tests/match_engine_test.py @@ -0,0 +1,248 @@ +import os, sys +import unittest +import pytest +from dump.match_infra import MatchEngine, EXCEP_DICT, MatchRequest +from deepdiff import DeepDiff +from importlib import reload + +test_path = os.path.join(os.path.dirname(__file__), "../") +dump_test_input = os.path.join(test_path, "dump_input") + +sys.path.append(test_path) + +@pytest.fixture(scope="module", autouse=True) +def mock_setup(): + print("SETUP") + os.environ["VERBOSE"] = "1" + yield + print("TEARDOWN") + os.environ["VERBOSE"] = "0" + + +class TestMatchRequestValidation(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestMatchRequestValidation, self).__init__(*args, **kwargs) + self.match_engine = MatchEngine() + + def assertRaisesWithMessage(self, msg, func, *args, **kwargs): + try: + func(*args, **kwargs) + assert False, "Expected an exception with msg: " + msg + except Exception as inst: + print(inst) + assert msg in str(inst) + + def test_bad_request(self): + req = [] + ret = self.match_engine.fetch(req) + assert ret["error"] == EXCEP_DICT["INV_REQ"] + + def test_no_source(self): + self.assertRaisesWithMessage(EXCEP_DICT["NO_SRC"], MatchRequest) + + def test_vague_source(self): + self.assertRaisesWithMessage(EXCEP_DICT["SRC_VAGUE"], 
MatchRequest, db="CONFIG_DB", file="/etc/sonic/copp_cfg.json") + + def test_no_file(self): + self.assertRaisesWithMessage(EXCEP_DICT["FILE_R_EXEP"], MatchRequest, file=os.path.join(test_path, "random_db.json")) + + def test_invalid_db(self): + self.assertRaisesWithMessage(EXCEP_DICT["INV_DB"], MatchRequest, db="CONFIGURATION_DB") + + def test_invalid_namespace(self): + self.assertRaisesWithMessage(EXCEP_DICT["INV_NS"], MatchRequest, db="APPL_DB", table="PORT_TABLE", + field="lanes", value="202", ns="asic4") + + def test_bad_key_pattern(self): + req = MatchRequest(db="CONFIG_DB", table="PORT", key_pattern="") + ret = self.match_engine.fetch(req) + assert ret["error"] == EXCEP_DICT["NO_KEY"] + + def test_no_value(self): + self.assertRaisesWithMessage(EXCEP_DICT["NO_VALUE"], MatchRequest, db="APPL_DB", table="COPP_TABLE", key_pattern="*", field="trap_ids", value="") + + def test_no_table(self): + self.assertRaisesWithMessage(EXCEP_DICT["NO_TABLE"], MatchRequest, db="APPL_DB", table="", key_pattern="*", field="trap_ids", value="bgpv6") + + def test_just_keys_return_fields_compat(self): + self.assertRaisesWithMessage(EXCEP_DICT["JUST_KEYS_COMPAT"], MatchRequest, db="APPL_DB", return_fields=["trap_group"], table="COPP_TABLE", + key_pattern="*", field="trap_ids", value="", just_keys=False) + + def test_invalid_combination(self): + req = MatchRequest(db="CONFIG_DB", table="COPP_TRAP", key_pattern="*", field="trap_ids", value="sample_packet") + ret = self.match_engine.fetch(req) + assert ret["error"] == EXCEP_DICT["NO_MATCHES"] + + def test_return_fields_bad_format(self): + self.assertRaisesWithMessage(EXCEP_DICT["BAD_FORMAT_RE_FIELDS"], MatchRequest, db="STATE_DB", table="REBOOT_CAUSE", key_pattern="*", return_fields="cause") + + def test_valid_match_request(self): + try: + req = MatchRequest(db="APPL_DB", table="PORT_TABLE", field="lanes", value="202") + except Exception as e: + assert False, "Exception Raised for a Valid MatchRequest" + str(e) + + +class 
TestMatchEngine(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestMatchEngine, self).__init__(*args, **kwargs) + self.match_engine = MatchEngine() + + def test_key_pattern_wildcard(self): + req = MatchRequest(db="CONFIG_DB", table="SFLOW_COLLECTOR", key_pattern="*") + ret = self.match_engine.fetch(req) + assert ret["error"] == "" + assert len(ret["keys"]) == 2 + assert "SFLOW_COLLECTOR|ser5" in ret['keys'] + assert "SFLOW_COLLECTOR|prod" in ret['keys'] + + def test_key_pattern_complex(self): + req = MatchRequest(db="CONFIG_DB", table="ACL_RULE", key_pattern="EVERFLOW*") + ret = self.match_engine.fetch(req) + assert ret["error"] == "" + assert len(ret["keys"]) == 2 + assert "ACL_RULE|EVERFLOW|RULE_6" in ret['keys'] + assert "ACL_RULE|EVERFLOW|RULE_08" in ret['keys'] + + def test_field_value_match(self): + req = MatchRequest(db="CONFIG_DB", table="ACL_TABLE", field="policy_desc", value="SSH_ONLY") + ret = self.match_engine.fetch(req) + assert ret["error"] == "" + assert len(ret["keys"]) == 1 + assert "ACL_TABLE|SSH_ONLY" in ret['keys'] + + def test_field_value_match_list_type(self): + req = MatchRequest(db="APPL_DB", table="PORT_TABLE", field="lanes", value="202") + ret = self.match_engine.fetch(req) + assert ret["error"] == "" + assert len(ret["keys"]) == 1 + assert "PORT_TABLE:Ethernet200" in ret['keys'] + + def test_for_no_match(self): + req = MatchRequest(db="ASIC_DB", table="ASIC_STATE:SAI_OBJECT_TYPE_SWITCH", field="SAI_SWITCH_ATTR_SRC_MAC_ADDRESS", value="DE:AD:EE:EE:EE") + ret = self.match_engine.fetch(req) + assert ret["error"] == EXCEP_DICT["NO_ENTRIES"] + assert len(ret["keys"]) == 0 + + def test_for_no_key_match(self): + req = MatchRequest(db="ASIC_DB", table="ASIC_STATE:SAI_OBJECT_TYPE_SWITCH", key_pattern="oid:0x22*") + ret = self.match_engine.fetch(req) + assert ret["error"] == EXCEP_DICT["NO_MATCHES"] + + def test_field_value_no_match(self): + req = MatchRequest(db="STATE_DB", table="FAN_INFO", key_pattern="*", 
field="led_status", value="yellow") + ret = self.match_engine.fetch(req) + assert ret["error"] == EXCEP_DICT["NO_ENTRIES"] + assert len(ret["keys"]) == 0 + + def test_return_keys(self): + req = MatchRequest(db="STATE_DB", table="REBOOT_CAUSE", return_fields=["cause"]) + ret = self.match_engine.fetch(req) + assert ret["error"] == "" + assert len(ret["keys"]) == 2 + assert "warm-reboot" == ret["return_values"]["REBOOT_CAUSE|2020_10_09_04_53_58"]["cause"] + assert "reboot" == ret["return_values"]["REBOOT_CAUSE|2020_10_09_02_33_06"]["cause"] + + def test_return_fields_with_key_filtering(self): + req = MatchRequest(db="STATE_DB", table="REBOOT_CAUSE", key_pattern="2020_10_09_02*", return_fields=["cause"]) + ret = self.match_engine.fetch(req) + assert ret["error"] == "" + assert len(ret["keys"]) == 1 + assert "reboot" == ret["return_values"]["REBOOT_CAUSE|2020_10_09_02_33_06"]["cause"] + + def test_return_fields_with_field_value_filtering(self): + req = MatchRequest(db="STATE_DB", table="CHASSIS_MODULE_TABLE", field="oper_status", value="Offline", return_fields=["slot"]) + ret = self.match_engine.fetch(req) + assert ret["error"] == "" + assert len(ret["keys"]) == 1 + assert "18" == ret["return_values"]["CHASSIS_MODULE_TABLE|FABRIC-CARD1"]["slot"] + + def test_return_fields_with_all_filtering(self): + req = MatchRequest(db="STATE_DB", table="VXLAN_TUNNEL_TABLE", key_pattern="EVPN_25.25.25.2*", field="operstatus", value="down", return_fields=["src_ip"]) + ret = self.match_engine.fetch(req) + assert ret["error"] == "" + assert len(ret["keys"]) == 3 + assert "1.1.1.1" == ret["return_values"]["VXLAN_TUNNEL_TABLE|EVPN_25.25.25.25"]["src_ip"] + assert "1.1.1.1" == ret["return_values"]["VXLAN_TUNNEL_TABLE|EVPN_25.25.25.26"]["src_ip"] + assert "1.1.1.1" == ret["return_values"]["VXLAN_TUNNEL_TABLE|EVPN_25.25.25.27"]["src_ip"] + + def test_just_keys_false(self): + req = MatchRequest(db="CONFIG_DB", table="SFLOW", key_pattern="global", just_keys=False) + ret = 
self.match_engine.fetch(req) + assert ret["error"] == "" + assert len(ret["keys"]) == 1 + recv_dict = ret["keys"][0] + assert isinstance(recv_dict, dict) + exp_dict = {"SFLOW|global": {"admin_state": "up", "polling_interval": "0"}} + ddiff = DeepDiff(exp_dict, recv_dict) + assert not ddiff, ddiff + + def test_file_source(self): + file = os.path.join(dump_test_input, "copp_cfg.json") + req = MatchRequest(file=file, table="COPP_TRAP", field="trap_ids", value="arp_req") + ret = self.match_engine.fetch(req) + assert ret["error"] == "" + assert len(ret["keys"]) == 1 + assert "COPP_TRAP|arp" in ret["keys"] + + def test_file_source_with_key_ptrn(self): + file = os.path.join(dump_test_input, "copp_cfg.json") + req = MatchRequest(file=file, table="COPP_GROUP", key_pattern="queue4*", field="red_action", value="drop") + ret = self.match_engine.fetch(req) + assert ret["error"] == "" + assert len(ret["keys"]) == 1 + assert "COPP_GROUP|queue4_group2" in ret["keys"] + + def test_file_source_with_not_only_return_keys(self): + file = os.path.join(dump_test_input, "copp_cfg.json") + req = MatchRequest(file=file, table="COPP_GROUP", key_pattern="queue4*", field="red_action", value="drop", just_keys=False) + ret = self.match_engine.fetch(req) + assert ret["error"] == "" + assert len(ret["keys"]) == 1 + recv_dict = ret["keys"][0] + exp_dict = {"COPP_GROUP|queue4_group2": {"trap_action": "copy", "trap_priority": "4", "queue": "4", "meter_type": "packets", "mode": "sr_tcm", "cir": "600", "cbs": "600", "red_action": "drop"}} + ddiff = DeepDiff(exp_dict, recv_dict) + assert not ddiff, ddiff + + def test_match_entire_list(self): + req = MatchRequest(db="CONFIG_DB", table="PORT", key_pattern="*", field="lanes", value="61,62,63,64", match_entire_list=True, just_keys=True) + ret = self.match_engine.fetch(req) + assert ret["error"] == "" + assert len(ret["keys"]) == 1 + assert "PORT|Ethernet60" in ret["keys"] + + +class TestNonDefaultNameSpace(unittest.TestCase): + + @classmethod + def 
setup_class(cls): + print("SETUP") + os.environ["UTILITIES_UNIT_TESTING"] = "2" + os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] = "multi_asic" + from ..mock_tables import mock_multi_asic + reload(mock_multi_asic) + from ..mock_tables import dbconnector + dbconnector.load_namespace_config() + + def teardown_class(cls): + print("TEARDOWN") + os.environ["UTILITIES_UNIT_TESTING"] = "0" + os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] = "" + + def test_namespace_asic0(self): + req = MatchRequest(db="CONFIG_DB", table="PORT", key_pattern="*", field="asic_port_name", value="Eth0-ASIC0", ns="asic0") + match_engine = MatchEngine() + ret = match_engine.fetch(req) + assert ret["error"] == "" + assert len(ret["keys"]) == 1 + assert "PORT|Ethernet0" in ret["keys"] + + def test_namespace_asic1(self): + req = MatchRequest(db="CONFIG_DB", table="PORT", key_pattern="Ethernet-BP256", ns="asic1") + match_engine = MatchEngine() + ret = match_engine.fetch(req) + assert ret["error"] == "" + assert len(ret["keys"]) == 1 + assert "PORT|Ethernet-BP256" in ret["keys"]