diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 40d394b9489d12..9dbeb1d6109811 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -476,6 +476,16 @@ jobs: build \ --copy-artifacts-to objdir-clone \ " + - name: Generate an argument environment file + run: | + echo -n "" >/tmp/test_env.yaml + echo "ALL_CLUSTERS_APP: out/linux-x64-all-clusters-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-all-clusters-app" >> /tmp/test_env.yaml + echo "CHIP_LOCK_APP: out/linux-x64-lock-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-lock-app" >> /tmp/test_env.yaml + echo "ENERGY_MANAGEMENT_APP: out/linux-x64-energy-management-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-energy-management-app" >> /tmp/test_env.yaml + echo "TRACE_APP: out/trace_data/app-{SCRIPT_BASE_NAME}" >> /tmp/test_env.yaml + echo "TRACE_TEST_JSON: out/trace_data/test-{SCRIPT_BASE_NAME}" >> /tmp/test_env.yaml + echo "TRACE_TEST_PERFETTO: out/trace_data/test-{SCRIPT_BASE_NAME}" >> /tmp/test_env.yaml + - name: Run Tests run: | mkdir -p out/trace_data @@ -517,7 +527,7 @@ jobs: scripts/run_in_python_env.sh out/venv './scripts/tests/run_python_test.py --app out/linux-x64-all-clusters-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-all-clusters-app --factoryreset --quiet --app-args "--discriminator 1234 --KVS kvs1 --trace-to json:out/trace_data/app-{SCRIPT_BASE_NAME}.json --enable-key 000102030405060708090a0b0c0d0e0f" --script "src/python_testing/TC_IDM_1_4.py" --script-args "--hex-arg PIXIT.DGGEN.TEST_EVENT_TRIGGER_KEY:000102030405060708090a0b0c0d0e0f --storage-path admin_storage.json --commissioning-method on-network --discriminator 1234 --passcode 20202021 --trace-to json:out/trace_data/test-{SCRIPT_BASE_NAME}.json --trace-to perfetto:out/trace_data/test-{SCRIPT_BASE_NAME}.perfetto"' scripts/run_in_python_env.sh out/venv './scripts/tests/run_python_test.py --app out/linux-x64-all-clusters-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-all-clusters-app --factoryreset 
--quiet --app-args "--discriminator 1234 --KVS kvs1 --trace-to json:out/trace_data/app-{SCRIPT_BASE_NAME}.json" --script "src/python_testing/TC_PWRTL_2_1.py" --script-args "--storage-path admin_storage.json --commissioning-method on-network --discriminator 1234 --passcode 20202021 --trace-to json:out/trace_data/test-{SCRIPT_BASE_NAME}.json --trace-to perfetto:out/trace_data/test-{SCRIPT_BASE_NAME}.perfetto"' scripts/run_in_python_env.sh out/venv './scripts/tests/run_python_test.py --app out/linux-x64-all-clusters-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-all-clusters-app --factoryreset --quiet --app-args "--discriminator 1234 --KVS kvs1 --trace-to json:out/trace_data/app-{SCRIPT_BASE_NAME}.json" --script "src/python_testing/TC_RR_1_1.py" --script-args "--storage-path admin_storage.json --commissioning-method on-network --discriminator 1234 --passcode 20202021 --trace-to json:out/trace_data/test-{SCRIPT_BASE_NAME}.json --trace-to perfetto:out/trace_data/test-{SCRIPT_BASE_NAME}.perfetto"' - scripts/run_in_python_env.sh out/venv './scripts/tests/run_python_test.py --app out/linux-x64-all-clusters-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-all-clusters-app --factoryreset --quiet --app-args "--discriminator 1234 --KVS kvs1 --trace-to json:out/trace_data/app-{SCRIPT_BASE_NAME}.json" --script "src/python_testing/TC_SC_3_6.py" --script-args "--storage-path admin_storage.json --commissioning-method on-network --discriminator 1234 --passcode 20202021 --trace-to json:out/trace_data/test-{SCRIPT_BASE_NAME}.json --trace-to perfetto:out/trace_data/test-{SCRIPT_BASE_NAME}.perfetto"' + scripts/run_in_python_env.sh out/venv './scripts/tests/run_python_test.py --load-from-env /tmp/test_env.yaml --script src/python_testing/TC_SC_3_6.py' scripts/run_in_python_env.sh out/venv './scripts/tests/run_python_test.py --app out/linux-x64-all-clusters-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-all-clusters-app --factoryreset --quiet --app-args "--discriminator 1234 --KVS kvs1 --trace-to 
json:out/trace_data/app-{SCRIPT_BASE_NAME}.json" --script "src/python_testing/TC_TIMESYNC_2_1.py" --script-args "--storage-path admin_storage.json --commissioning-method on-network --discriminator 1234 --passcode 20202021 --PICS src/app/tests/suites/certification/ci-pics-values --trace-to json:out/trace_data/test-{SCRIPT_BASE_NAME}.json --trace-to perfetto:out/trace_data/test-{SCRIPT_BASE_NAME}.perfetto"' scripts/run_in_python_env.sh out/venv './scripts/tests/run_python_test.py --app out/linux-x64-all-clusters-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-all-clusters-app --factoryreset --quiet --app-args "--discriminator 1234 --KVS kvs1 --trace-to json:out/trace_data/app-{SCRIPT_BASE_NAME}.json" --script "src/python_testing/TC_TIMESYNC_2_10.py" --script-args "--storage-path admin_storage.json --commissioning-method on-network --discriminator 1234 --passcode 20202021 --PICS src/app/tests/suites/certification/ci-pics-values --trace-to json:out/trace_data/test-{SCRIPT_BASE_NAME}.json --trace-to perfetto:out/trace_data/test-{SCRIPT_BASE_NAME}.perfetto"' scripts/run_in_python_env.sh out/venv './scripts/tests/run_python_test.py --app out/linux-x64-all-clusters-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-all-clusters-app --factoryreset --quiet --app-args "--discriminator 1234 --KVS kvs1 --trace-to json:out/trace_data/app-{SCRIPT_BASE_NAME}.json" --script "src/python_testing/TC_TIMESYNC_2_11.py" --script-args "--storage-path admin_storage.json --commissioning-method on-network --discriminator 1234 --passcode 20202021 --PICS src/app/tests/suites/certification/ci-pics-values --trace-to json:out/trace_data/test-{SCRIPT_BASE_NAME}.json --trace-to perfetto:out/trace_data/test-{SCRIPT_BASE_NAME}.perfetto"' diff --git a/.pullapprove.yml b/.pullapprove.yml index c18d52a36eb3b9..98dee34c049229 100644 --- a/.pullapprove.yml +++ b/.pullapprove.yml @@ -133,6 +133,14 @@ groups: teams: [reviewers-google] reviews: request: 10 + shared-reviewers-grundfos: + type: optional + conditions: + 
- files.include('*') + reviewers: + teams: [reviewers-grundfos] + reviews: + request: 10 shared-reviewers-irobot: type: optional conditions: diff --git a/credentials/fetch-paa-certs-from-dcl.py b/credentials/fetch_paa_certs_from_dcl.py similarity index 69% rename from credentials/fetch-paa-certs-from-dcl.py rename to credentials/fetch_paa_certs_from_dcl.py index d440398c472be7..3bcd74b6534e26 100644 --- a/credentials/fetch-paa-certs-from-dcl.py +++ b/credentials/fetch_paa_certs_from_dcl.py @@ -37,6 +37,9 @@ PRODUCTION_NODE_URL_REST = "https://on.dcl.csa-iot.org" TEST_NODE_URL_REST = "https://on.test-net.dcl.csa-iot.org" +MATTER_CERT_CA_SUBJECT = "MFIxDDAKBgNVBAoMA0NTQTEsMCoGA1UEAwwjTWF0dGVyIENlcnRpZmljYXRpb24gYW5kIFRlc3RpbmcgQ0ExFDASBgorBgEEAYKifAIBDARDNUEw" +MATTER_CERT_CA_SUBJECT_KEY_ID = "97:E4:69:D0:C5:04:14:C2:6F:C7:01:F7:7E:94:77:39:09:8D:F6:A5" + def parse_paa_root_certs(cmdpipe, paa_list): """ @@ -73,13 +76,14 @@ def parse_paa_root_certs(cmdpipe, paa_list): else: if b': ' in line: key, value = line.split(b': ') - result[key.strip(b' -').decode("utf-8")] = value.strip().decode("utf-8") + result[key.strip(b' -').decode("utf-8") + ] = value.strip().decode("utf-8") parse_paa_root_certs.counter += 1 if parse_paa_root_certs.counter % 2 == 0: paa_list.append(copy.deepcopy(result)) -def write_paa_root_cert(certificate, subject): +def write_cert(certificate, subject): filename = 'dcld_mirror_' + \ re.sub('[^a-zA-Z0-9_-]', '', re.sub('[=, ]', '_', subject)) with open(filename + '.pem', 'w+') as outfile: @@ -93,7 +97,8 @@ def write_paa_root_cert(certificate, subject): serialization.Encoding.DER) outfile.write(der_certificate) except (IOError, ValueError) as e: - print(f"ERROR: Failed to convert {filename + '.pem'}: {str(e)}. Skipping...") + print( + f"ERROR: Failed to convert {filename + '.pem'}: {str(e)}. 
Skipping...") def parse_paa_root_cert_from_dcld(cmdpipe): @@ -133,7 +138,38 @@ def use_dcld(dcld, production, cmdlist): @optgroup.option('--paa-trust-store-path', default='paa-root-certs', type=str, metavar='PATH', help="PAA trust store path (default: paa-root-certs)") def main(use_main_net_dcld, use_test_net_dcld, use_main_net_http, use_test_net_http, paa_trust_store_path): """DCL PAA mirroring tools""" + fetch_paa_certs(use_main_net_dcld, use_test_net_dcld, use_main_net_http, use_test_net_http, paa_trust_store_path) + + +def get_cert_from_rest(rest_node_url, subject, subject_key_id): + response = requests.get( + f"{rest_node_url}/dcl/pki/certificates/{subject}/{subject_key_id}").json()["approvedCertificates"]["certs"][0] + certificate = response["pemCert"].rstrip("\n") + subject = response["subjectAsText"] + return certificate, subject + + +def fetch_cd_signing_certs(store_path): + ''' Only supports using main net http currently.''' + rest_node_url = PRODUCTION_NODE_URL_REST + os.makedirs(store_path, exist_ok=True) + original_dir = os.getcwd() + os.chdir(store_path) + cd_signer_ids = requests.get( + f"{rest_node_url}/dcl/pki/child-certificates/{MATTER_CERT_CA_SUBJECT}/{MATTER_CERT_CA_SUBJECT_KEY_ID}").json()['childCertificates']['certIds'] + for signer in cd_signer_ids: + subject = signer['subject'] + subject_key_id = signer['subjectKeyId'] + certificate, subject = get_cert_from_rest(rest_node_url, subject, subject_key_id) + + print(f"Downloaded CD signing cert with subject: {subject}") + write_cert(certificate, subject) + + os.chdir(original_dir) + + +def fetch_paa_certs(use_main_net_dcld, use_test_net_dcld, use_main_net_http, use_test_net_http, paa_trust_store_path): production = False dcld = use_test_net_dcld @@ -148,36 +184,43 @@ def main(use_main_net_dcld, use_test_net_dcld, use_main_net_http, use_test_net_h rest_node_url = PRODUCTION_NODE_URL_REST if production else TEST_NODE_URL_REST os.makedirs(paa_trust_store_path, exist_ok=True) + original_dir = 
os.getcwd() os.chdir(paa_trust_store_path) if use_rest: - paa_list = requests.get(f"{rest_node_url}/dcl/pki/root-certificates").json()["approvedRootCertificates"]["certs"] + paa_list = requests.get( + f"{rest_node_url}/dcl/pki/root-certificates").json()["approvedRootCertificates"]["certs"] else: cmdlist = ['query', 'pki', 'all-x509-root-certs'] - cmdpipe = subprocess.Popen(use_dcld(dcld, production, cmdlist), stdout=subprocess.PIPE, stderr=subprocess.PIPE) + cmdpipe = subprocess.Popen(use_dcld( + dcld, production, cmdlist), stdout=subprocess.PIPE, stderr=subprocess.PIPE) paa_list = [] parse_paa_root_certs.counter = 0 parse_paa_root_certs(cmdpipe, paa_list) for paa in paa_list: + if paa['subject'] == MATTER_CERT_CA_SUBJECT and paa['subjectKeyId'] == MATTER_CERT_CA_SUBJECT_KEY_ID: + # Don't include the CD signing cert as a PAA root. + continue if use_rest: - response = requests.get( - f"{rest_node_url}/dcl/pki/certificates/{paa['subject']}/{paa['subjectKeyId']}").json()["approvedCertificates"]["certs"][0] - certificate = response["pemCert"] - subject = response["subjectAsText"] + certificate, subject = get_cert_from_rest(rest_node_url, paa['subject'], paa['subjectKeyId']) else: - cmdlist = ['query', 'pki', 'x509-cert', '-u', paa['subject'], '-k', paa['subjectKeyId']] + cmdlist = ['query', 'pki', 'x509-cert', '-u', + paa['subject'], '-k', paa['subjectKeyId']] - cmdpipe = subprocess.Popen(use_dcld(dcld, production, cmdlist), stdout=subprocess.PIPE, stderr=subprocess.PIPE) + cmdpipe = subprocess.Popen(use_dcld( + dcld, production, cmdlist), stdout=subprocess.PIPE, stderr=subprocess.PIPE) (certificate, subject) = parse_paa_root_cert_from_dcld(cmdpipe) certificate = certificate.rstrip('\n') - print(f"Downloaded certificate with subject: {subject}") - write_paa_root_cert(certificate, subject) + print(f"Downloaded PAA certificate with subject: {subject}") + write_cert(certificate, subject) + + os.chdir(original_dir) if __name__ == "__main__": diff --git 
a/examples/bridge-app/linux/main.cpp b/examples/bridge-app/linux/main.cpp index a894b5d0150ca6..699a87fa8c7938 100644 --- a/examples/bridge-app/linux/main.cpp +++ b/examples/bridge-app/linux/main.cpp @@ -278,6 +278,7 @@ int AddDeviceEndpoint(Device * dev, EmberAfEndpointType * ep, const Span None: + py_script_path: str + run: str + app: str + app_args: str + script_args: str + factoryreset: bool = False + factoryreset_app_only: bool = False + script_gdb: bool = False + quiet: bool = True + + def copy_from_dict(self, attr_dict: Dict[str, Any]) -> None: """ Sets the value of the attributes from a dictionary. Attributes: attr_dict: - Dictionary that stores attributes value that should - be transferred to this class. + Dictionary that stores attributes value that should + be transferred to this class. """ - if "app" in attr_dict: self.app = attr_dict["app"] if "run" in attr_dict: self.run = attr_dict["run"] - if "discriminator" in attr_dict: - self.discriminator = attr_dict["discriminator"] + if "app-args" in attr_dict: + self.app_args = attr_dict["app-args"] - if "passcode" in attr_dict: - self.passcode = attr_dict["passcode"] + if "script-args" in attr_dict: + self.script_args = attr_dict["script-args"] if "py_script_path" in attr_dict: self.py_script_path = attr_dict["py_script_path"] - # TODO - set other attributes as well + if "factoryreset" in attr_dict: + self.factoryreset = bool(attr_dict["factoryreset"]) + + if "factoryreset_app_only" in attr_dict: + self.factoryreset_app_only = bool(attr_dict["factoryreset_app_only"]) + + if "script_gdb" in attr_dict: + self.script_gdb = bool(attr_dict["script_gdb"]) + + if "quiet" in attr_dict: + self.quiet = bool(attr_dict["quiet"]) class MetadataReader: """ - A class to parse run arguments from the test scripts and + A class to parse run arguments from the test scripts and resolve them to environment specific values. 
""" @@ -70,97 +83,30 @@ def __init__(self, env_yaml_file_path: str): Parameters: env_yaml_file_path: - Path to the environment file that contains the YAML configuration. + Path to the environment file that contains the YAML configuration. """ with open(env_yaml_file_path) as stream: - self.env = yaml.safe_load(stream) + self.env: Dict[str, str] = yaml.safe_load(stream) def __resolve_env_vals__(self, metadata_dict: Dict[str, str]) -> None: """ Resolves the argument defined in the test script to environment values. For example, if a test script defines "all_clusters" as the value for app name, we will check the environment configuration to see what raw value is - assocaited with the "all_cluster" variable and set the value for "app" option + associated with the "all_cluster" variable and set the value for "app" option to this raw value. Parameter: metadata_dict: - Dictionary where each key represent a particular argument and its value represent - the value for that argument defined in the test script. - """ - - for run_arg, run_arg_val in metadata_dict.items(): - - if not type(run_arg_val) == str or run_arg == "run": - metadata_dict[run_arg] = run_arg_val - continue - - if run_arg_val is None: - continue - - sub_args = run_arg_val.split('/') - - if len(sub_args) not in [1, 2]: - err = """The argument is not in the correct format. - The argument must follow the format of arg1 or arg1/arg2. - For example, arg1 represents the argument type and optionally arg2 - represents a specific variable defined in the environment file whose - value should be used as the argument value. 
If arg2 is not specified, - we will just use the first value associated with arg1 in the environment file.""" - raise Exception(err) - - if len(sub_args) == 1: - run_arg_val = self.env.get(sub_args[0]) - - elif len(sub_args) == 2: - run_arg_val = self.env.get(sub_args[0]).get(sub_args[1]) - - # if a argument has been specified in the comment header - # but can't be found in the env file, consider it to be - # boolean value. - if run_arg_val is None: - run_arg_val = True - - metadata_dict[run_arg] = run_arg_val - - def __read_args__(self, run_args_lines: List[str]) -> Dict[str, str]: + Dictionary where each key represent a particular argument and its value represent + the value for that argument defined in the test script. """ - Parses a list of lines and extracts argument - values from it. - - Parameters: - - run_args_lines: - Line in test script header that contains run argument definition. - Each line will contain a list of run arguments separated by a space. - Line below is one example of what the run argument line will look like: - "app/all-clusters discriminator KVS storage-path" - - In this case the line defines that app, discriminator, KVS, and storage-path - are the arguments that should be used with this run. - - An argument can be defined multiple times in the same line or in different lines. - The last definition will override any previous definition. For example, - "KVS/kvs1 KVS/kvs2 KVS/kvs3" line will lead to KVS value of kvs3. - """ - metadata_dict = {} - - for run_line in run_args_lines: - for run_arg_word in run_line.strip().split(): - ''' - We expect the run arg to be defined in one of the - following two formats: - 1. run_arg - 2. 
run_arg/run_arg_val - - Examples: "discriminator" and "app/all_clusters" - - ''' - run_arg = run_arg_word.split('/', 1)[0] - metadata_dict[run_arg] = run_arg_word - - return metadata_dict + for arg, arg_val in metadata_dict.items(): + # We do not expect to recurse (like ${FOO_${BAR}}) so just expand once + for name, value in self.env.items(): + arg_val = arg_val.replace(f'${{{name}}}', value) + metadata_dict[arg] = arg_val def parse_script(self, py_script_path: str) -> List[Metadata]: """ @@ -171,47 +117,51 @@ def parse_script(self, py_script_path: str) -> List[Metadata]: Parameter: py_script_path: - path to the python test script + path to the python test script Return: List[Metadata] - List of Metadata object where each Metadata element represents - the run arguments associated with a particular run defined in - the script file. + List of Metadata object where each Metadata element represents + the run arguments associated with a particular run defined in + the script file. """ runs_def_ptrn = re.compile(r'^\s*#\s*test-runner-runs:\s*(.*)$') - args_def_ptrn = re.compile(r'^\s*#\s*test-runner-run/([a-zA-Z0-9_]+):\s*(.*)$') + arg_def_ptrn = re.compile(r'^\s*#\s*test-runner-run/([a-zA-Z0-9_]+)/([a-zA-Z0-9_\-]+):\s*(.*)$') - runs_arg_lines: Dict[str, List[str]] = {} - runs_metadata = [] + runs_arg_lines: Dict[str, Dict[str, str]] = {} + runs_metadata: List[Metadata] = [] with open(py_script_path, 'r', encoding='utf8') as py_script: for line in py_script.readlines(): - runs_match = runs_def_ptrn.match(line.strip()) - args_match = args_def_ptrn.match(line.strip()) + args_match = arg_def_ptrn.match(line.strip()) if runs_match: for run in runs_match.group(1).strip().split(): - runs_arg_lines[run] = [] + runs_arg_lines[run] = {} + runs_arg_lines[run]['run'] = run + runs_arg_lines[run]['py_script_path'] = py_script_path elif args_match: - runs_arg_lines[args_match.group(1)].append(args_match.group(2)) - - for run, lines in runs_arg_lines.items(): - metadata_dict = 
self.__read_args__(lines) - self.__resolve_env_vals__(metadata_dict) - - # store the run value and script location in the - # metadata object - metadata_dict['py_script_path'] = py_script_path - metadata_dict['run'] = run - - metadata = Metadata() - - metadata.copy_from_dict(metadata_dict) + runs_arg_lines[args_match.group(1)][args_match.group(2)] = args_match.group(3) + + for run, attr in runs_arg_lines.items(): + self.__resolve_env_vals__(attr) + + metadata = Metadata( + py_script_path=attr.get("py_script_path", ""), + run=attr.get("run", ""), + app=attr.get("app", ""), + app_args=attr.get("app_args", ""), + script_args=attr.get("script_args", ""), + factoryreset=bool(attr.get("factoryreset", False)), + factoryreset_app_only=bool(attr.get("factoryreset_app_only", False)), + script_gdb=bool(attr.get("script_gdb", False)), + quiet=bool(attr.get("quiet", True)) + ) + metadata.copy_from_dict(attr) runs_metadata.append(metadata) return runs_metadata diff --git a/scripts/tests/py/test_metadata.py b/scripts/tests/py/test_metadata.py index 8707d483026bfb..a0c12a0ab0ed5a 100644 --- a/scripts/tests/py/test_metadata.py +++ b/scripts/tests/py/test_metadata.py @@ -12,39 +12,60 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os import tempfile import unittest -from os import path -from typing import List from metadata import Metadata, MetadataReader class TestMetadataReader(unittest.TestCase): - def setUp(self): - # build the reader object - self.reader = MetadataReader(path.join(path.dirname(__file__), "env_test.yaml")) + test_file_content = ''' + # test-runner-runs: run1 + # test-runner-run/run1/app: ${ALL_CLUSTERS_APP} + # test-runner-run/run1/app-args: --discriminator 1234 --trace-to json:${TRACE_APP}.json + # test-runner-run/run1/script-args: --commissioning-method on-network --trace-to json:${TRACE_TEST_JSON}.json --trace-to perfetto:${TRACE_TEST_PERFETTO}.perfetto + # test-runner-run/run1/factoryreset: True + # test-runner-run/run1/quiet: True + ''' - def assertMetadataParse(self, file_content: str, expected: List[Metadata]): - with tempfile.NamedTemporaryFile(mode='w', delete=False) as fp: + env_file_content = ''' + ALL_CLUSTERS_APP: out/linux-x64-all-clusters-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-all-clusters-app + CHIP_LOCK_APP: out/linux-x64-lock-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-lock-app + ENERGY_MANAGEMENT_APP: out/linux-x64-energy-management-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-energy-management-app + TRACE_APP: out/trace_data/app-{SCRIPT_BASE_NAME} + TRACE_TEST_JSON: out/trace_data/test-{SCRIPT_BASE_NAME} + TRACE_TEST_PERFETTO: out/trace_data/test-{SCRIPT_BASE_NAME} + ''' + + expected_metadata = Metadata( + script_args="--commissioning-method on-network --trace-to json:out/trace_data/test-{SCRIPT_BASE_NAME}.json --trace-to perfetto:out/trace_data/test-{SCRIPT_BASE_NAME}.perfetto", + py_script_path="", + app_args="--discriminator 1234 --trace-to json:out/trace_data/app-{SCRIPT_BASE_NAME}.json", + run="run1", + app="out/linux-x64-all-clusters-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-all-clusters-app", + factoryreset=True, + quiet=True + ) + + def generate_temp_file(self, directory: str, file_content: str) -> str: + fd, temp_file_path = 
tempfile.mkstemp(dir=directory) + with os.fdopen(fd, 'w') as fp: fp.write(file_content) - fp.close() - for e in expected: - e.py_script_path = fp.name - actual = self.reader.parse_script(fp.name) - self.assertEqual(actual, expected) - - def test_parse_single_run(self): - self.assertMetadataParse(''' - # test-runner-runs: run1 - # test-runner-run/run1: app/all-clusters discriminator passcode - ''', - [ - Metadata(app="out/linux-x64-all-clusters-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-all-clusters-app", - discriminator=1234, run="run1", passcode=20202021) - ] - ) + return temp_file_path + + def test_run_arg_generation(self): + with tempfile.TemporaryDirectory() as temp_dir: + temp_file = self.generate_temp_file(temp_dir, self.test_file_content) + env_file = self.generate_temp_file(temp_dir, self.env_file_content) + + reader = MetadataReader(env_file) + self.maxDiff = None + + self.expected_metadata.py_script_path = temp_file + actual = reader.parse_script(temp_file)[0] + self.assertEqual(self.expected_metadata, actual) if __name__ == "__main__": diff --git a/scripts/tests/run_python_test.py b/scripts/tests/run_python_test.py index 8eab21f8798a51..5ccd67a987a401 100755 --- a/scripts/tests/run_python_test.py +++ b/scripts/tests/run_python_test.py @@ -32,6 +32,7 @@ import click import coloredlogs from colorama import Fore, Style +from py.metadata import Metadata, MetadataReader DEFAULT_CHIP_ROOT = os.path.abspath( os.path.join(os.path.dirname(__file__), '..', '..')) @@ -89,7 +90,34 @@ def DumpProgramOutputToQueue(thread_list: typing.List[threading.Thread], tag: st @click.option("--script-gdb", is_flag=True, help='Run script through gdb') @click.option("--quiet", is_flag=True, help="Do not print output from passing tests. 
Use this flag in CI to keep github log sizes manageable.") -def main(app: str, factoryreset: bool, factoryreset_app_only: bool, app_args: str, script: str, script_args: str, script_gdb: bool, quiet: bool): +@click.option("--load-from-env", default=None, help="YAML file that contains values for environment variables.") +def main(app: str, factoryreset: bool, factoryreset_app_only: bool, app_args: str, script: str, script_args: str, script_gdb: bool, quiet: bool, load_from_env): + if load_from_env: + reader = MetadataReader(load_from_env) + runs = reader.parse_script(script) + else: + runs = [ + Metadata( + py_script_path=script, + run="cmd-run", + app=app, + app_args=app_args, + script_args=script_args, + factoryreset=factoryreset, + factoryreset_app_only=factoryreset_app_only, + script_gdb=script_gdb, + quiet=quiet + ) + ] + + for run in runs: + print(f"Executing {run.py_script_path.split('/')[-1]} {run.run}") + main_impl(run.app, run.factoryreset, run.factoryreset_app_only, run.app_args, + run.py_script_path, run.script_args, run.script_gdb, run.quiet) + + +def main_impl(app: str, factoryreset: bool, factoryreset_app_only: bool, app_args: str, script: str, script_args: str, script_gdb: bool, quiet: bool): + app_args = app_args.replace('{SCRIPT_BASE_NAME}', os.path.splitext(os.path.basename(script))[0]) script_args = script_args.replace('{SCRIPT_BASE_NAME}', os.path.splitext(os.path.basename(script))[0]) @@ -189,7 +217,8 @@ def main(app: str, factoryreset: bool, factoryreset_app_only: bool, app_args: st else: logging.info("Test completed successfully") - sys.exit(exit_code) + if exit_code != 0: + sys.exit(exit_code) if __name__ == '__main__': diff --git a/src/controller/python/chip/ChipDeviceCtrl.py b/src/controller/python/chip/ChipDeviceCtrl.py index 6e2e1336b5c837..736bfae0a5cf85 100644 --- a/src/controller/python/chip/ChipDeviceCtrl.py +++ b/src/controller/python/chip/ChipDeviceCtrl.py @@ -254,13 +254,18 @@ class DeviceProxyWrapper(): that is not an issue that 
needs to be accounted for and it will become very apparent if that happens. ''' + class DeviceProxyType(enum.Enum): + OPERATIONAL = enum.auto(), + COMMISSIONEE = enum.auto(), - def __init__(self, deviceProxy: ctypes.c_void_p, dmLib=None): + def __init__(self, deviceProxy: ctypes.c_void_p, proxyType, dmLib=None): self._deviceProxy = deviceProxy self._dmLib = dmLib + self._proxyType = proxyType def __del__(self): - if (self._dmLib is not None and hasattr(builtins, 'chipStack') and builtins.chipStack is not None): + # Commissionee device proxies are owned by the DeviceCommissioner. See #33031 + if (self._proxyType == self.DeviceProxyType.OPERATIONAL and self.self._dmLib is not None and hasattr(builtins, 'chipStack') and builtins.chipStack is not None): # This destructor is called from any threading context, including on the Matter threading context. # So, we cannot call chipStack.Call or chipStack.CallAsyncWithCompleteCallback which waits for the posted work to # actually be executed. Instead, we just post/schedule the work and move on. 
@@ -861,7 +866,23 @@ def GetClusterHandler(self): return self._Cluster - def GetConnectedDeviceSync(self, nodeid, allowPASE: bool = True, timeoutMs: int = None): + def FindOrEstablishPASESession(self, setupCode: str, nodeid: int, timeoutMs: int = None) -> typing.Optional[DeviceProxyWrapper]: + ''' Returns CommissioneeDeviceProxy if we can find or establish a PASE connection to the specified device''' + self.CheckIsActive() + returnDevice = c_void_p(None) + res = self._ChipStack.Call(lambda: self._dmLib.pychip_GetDeviceBeingCommissioned( + self.devCtrl, nodeid, byref(returnDevice)), timeoutMs) + if res.is_success: + return DeviceProxyWrapper(returnDevice, DeviceProxyWrapper.DeviceProxyType.COMMISSIONEE, self._dmLib) + + self.EstablishPASESession(setupCode, nodeid) + + res = self._ChipStack.Call(lambda: self._dmLib.pychip_GetDeviceBeingCommissioned( + self.devCtrl, nodeid, byref(returnDevice)), timeoutMs) + if res.is_success: + return DeviceProxyWrapper(returnDevice, DeviceProxyWrapper.DeviceProxyType.COMMISSIONEE, self._dmLib) + + def GetConnectedDeviceSync(self, nodeid, allowPASE=True, timeoutMs: int = None): ''' Gets an OperationalDeviceProxy or CommissioneeDeviceProxy for the specified Node. 
nodeId: Target's Node ID @@ -882,7 +903,7 @@ def GetConnectedDeviceSync(self, nodeid, allowPASE: bool = True, timeoutMs: int self.devCtrl, nodeid, byref(returnDevice)), timeoutMs) if res.is_success: logging.info('Using PASE connection') - return DeviceProxyWrapper(returnDevice) + return DeviceProxyWrapper(returnDevice, DeviceProxyWrapper.DeviceProxyType.COMMISSIONEE, self._dmLib) class DeviceAvailableClosure(): def deviceAvailable(self, device, err): @@ -916,7 +937,7 @@ def deviceAvailable(self, device, err): if returnDevice.value is None: returnErr.raise_on_error() - return DeviceProxyWrapper(returnDevice, self._dmLib) + return DeviceProxyWrapper(returnDevice, DeviceProxyWrapper.DeviceProxyType.OPERATIONAL, self._dmLib) async def WaitForActive(self, nodeid, *, timeoutSeconds=30.0, stayActiveDurationMs=30000): ''' Waits a LIT ICD device to become active. Will send a StayActive command to the device on active to allow human operations. @@ -948,7 +969,7 @@ async def GetConnectedDevice(self, nodeid, allowPASE: bool = True, timeoutMs: in self.devCtrl, nodeid, byref(returnDevice)), timeoutMs) if res.is_success: logging.info('Using PASE connection') - return DeviceProxyWrapper(returnDevice) + return DeviceProxyWrapper(returnDevice, DeviceProxyWrapper.DeviceProxyType.COMMISSIONEE, self._dmLib) eventLoop = asyncio.get_running_loop() future = eventLoop.create_future() @@ -987,7 +1008,7 @@ def deviceAvailable(self, device, err): else: await future - return DeviceProxyWrapper(future.result(), self._dmLib) + return DeviceProxyWrapper(future.result(), DeviceProxyWrapper.DeviceProxyType.OPERATIONAL, self._dmLib) def ComputeRoundTripTimeout(self, nodeid, upperLayerProcessingTimeoutMs: int = 0): ''' Returns a computed timeout value based on the round-trip time it takes for the peer at the other end of the session to diff --git a/src/platform/ESP32/Logging.cpp b/src/platform/ESP32/Logging.cpp index 07ec4a1690bd40..e8e53aa9e6275d 100644 --- a/src/platform/ESP32/Logging.cpp +++ 
b/src/platform/ESP32/Logging.cpp @@ -3,7 +3,6 @@ #include #include -#include #include #include @@ -19,7 +18,7 @@ namespace chip { namespace Logging { namespace Platform { -void ENFORCE_FORMAT(3, 0) LogV(const char * module, uint8_t category, const char * msg, va_list v) +void LogV(const char * module, uint8_t category, const char * msg, va_list v) { char tag[11]; diff --git a/src/platform/Linux/Logging.cpp b/src/platform/Linux/Logging.cpp index 28a19a6a30ca28..25e132e1a12b76 100644 --- a/src/platform/Linux/Logging.cpp +++ b/src/platform/Linux/Logging.cpp @@ -1,9 +1,9 @@ /* See Project CHIP LICENSE file for licensing information. */ +#include + #include -#include #include -#include #include #include @@ -35,7 +35,7 @@ namespace Platform { /** * CHIP log output functions. */ -void ENFORCE_FORMAT(3, 0) LogV(const char * module, uint8_t category, const char * msg, va_list v) +void LogV(const char * module, uint8_t category, const char * msg, va_list v) { struct timeval tv; diff --git a/src/platform/NuttX/Logging.cpp b/src/platform/NuttX/Logging.cpp index e10ec76d0a9c12..ea95fdc4fbc7ef 100644 --- a/src/platform/NuttX/Logging.cpp +++ b/src/platform/NuttX/Logging.cpp @@ -1,9 +1,9 @@ /* See Project CHIP LICENSE file for licensing information. */ +#include + #include -#include #include -#include #include #include @@ -35,7 +35,7 @@ namespace Platform { /** * CHIP log output functions. */ -void ENFORCE_FORMAT(3, 0) LogV(const char * module, uint8_t category, const char * msg, va_list v) +void LogV(const char * module, uint8_t category, const char * msg, va_list v) { struct timeval tv; diff --git a/src/platform/Tizen/Logging.cpp b/src/platform/Tizen/Logging.cpp index a228c8ef2b61c5..cfedc7fd56d031 100644 --- a/src/platform/Tizen/Logging.cpp +++ b/src/platform/Tizen/Logging.cpp @@ -15,15 +15,15 @@ * limitations under the License. 
*/ +#include + #include #include #include #include -#include #include -#include namespace chip { namespace Logging { @@ -32,7 +32,7 @@ namespace Platform { /** * CHIP log output functions. */ -void ENFORCE_FORMAT(3, 0) LogV(const char * module, uint8_t category, const char * msg, va_list v) +void LogV(const char * module, uint8_t category, const char * msg, va_list v) { static constexpr char kLogTag[] = "CHIP"; diff --git a/src/platform/Zephyr/Logging.cpp b/src/platform/Zephyr/Logging.cpp index 4f7322534e331e..a5c1ad7b063d03 100644 --- a/src/platform/Zephyr/Logging.cpp +++ b/src/platform/Zephyr/Logging.cpp @@ -3,7 +3,6 @@ #include #include -#include #include #include @@ -42,7 +41,7 @@ namespace Platform { * CHIP log output function. */ -void ENFORCE_FORMAT(3, 0) LogV(const char * module, uint8_t category, const char * msg, va_list v) +void LogV(const char * module, uint8_t category, const char * msg, va_list v) { char formattedMsg[CHIP_CONFIG_LOG_MESSAGE_MAX_SIZE]; snprintfcb(formattedMsg, sizeof(formattedMsg), "[%s]", module); diff --git a/src/platform/logging/impl/android/Logging.cpp b/src/platform/logging/impl/android/Logging.cpp index fc6433113bd740..404c76f803ba12 100644 --- a/src/platform/logging/impl/android/Logging.cpp +++ b/src/platform/logging/impl/android/Logging.cpp @@ -1,16 +1,16 @@ /* See Project chip LICENSE file for licensing information. */ -#include -#include #include +#include + #include namespace chip { namespace Logging { namespace Platform { -void ENFORCE_FORMAT(3, 0) LogV(const char * module, uint8_t category, const char * msg, va_list v) +void LogV(const char * module, uint8_t category, const char * msg, va_list v) { int priority = (category == kLogCategory_Error) ? 
ANDROID_LOG_ERROR : ANDROID_LOG_DEBUG; __android_log_vprint(priority, module, msg, v); diff --git a/src/platform/logging/impl/stdio/Logging.cpp b/src/platform/logging/impl/stdio/Logging.cpp index 6eceebb2e961df..c403c29b913293 100644 --- a/src/platform/logging/impl/stdio/Logging.cpp +++ b/src/platform/logging/impl/stdio/Logging.cpp @@ -1,16 +1,16 @@ /* See Project CHIP LICENSE file for licensing information. */ -#include -#include #include +#include + #include namespace chip { namespace Logging { namespace Platform { -void ENFORCE_FORMAT(3, 0) LogV(const char * module, uint8_t category, const char * msg, va_list v) +void LogV(const char * module, uint8_t category, const char * msg, va_list v) { printf("CHIP:%s: ", module); vprintf(msg, v); diff --git a/src/platform/logging/impl/stdio/darwin/Logging.cpp b/src/platform/logging/impl/stdio/darwin/Logging.cpp index f5599f5996b570..49c58eea1d7bba 100644 --- a/src/platform/logging/impl/stdio/darwin/Logging.cpp +++ b/src/platform/logging/impl/stdio/darwin/Logging.cpp @@ -15,10 +15,10 @@ * limitations under the License. */ +#include + #include -#include #include -#include #include #include #include @@ -28,7 +28,7 @@ namespace chip { namespace Logging { namespace Platform { -void ENFORCE_FORMAT(3, 0) LogV(const char * module, uint8_t category, const char * msg, va_list v) +void LogV(const char * module, uint8_t category, const char * msg, va_list v) { timeval time; gettimeofday(&time, nullptr); diff --git a/src/platform/mbed/Logging.cpp b/src/platform/mbed/Logging.cpp index 1b85d2c0ec8d7b..0e03fa81259520 100644 --- a/src/platform/mbed/Logging.cpp +++ b/src/platform/mbed/Logging.cpp @@ -21,15 +21,15 @@ * Logging implementation for Mbed platform */ -#include -#include -#include -#include #include #include #include +#include +#include +#include + #include "mbed-trace/mbed_trace.h" #ifdef MBED_TRACE_FILTER_LENGTH @@ -66,7 +66,7 @@ char logMsgBuffer[CHIP_CONFIG_LOG_MESSAGE_MAX_SIZE]; /** * CHIP log output functions. 
*/ -void ENFORCE_FORMAT(3, 0) LogV(const char * module, uint8_t category, const char * msg, va_list v) +void LogV(const char * module, uint8_t category, const char * msg, va_list v) { size_t prefixLen = 0; snprintf(logMsgBuffer, sizeof(logMsgBuffer), "[%s]", module); diff --git a/src/platform/nxp/k32w/k32w0/Logging.cpp b/src/platform/nxp/k32w/k32w0/Logging.cpp index 5fc5b64d4404d9..2a9c0816b0facd 100644 --- a/src/platform/nxp/k32w/k32w0/Logging.cpp +++ b/src/platform/nxp/k32w/k32w0/Logging.cpp @@ -2,7 +2,9 @@ #include +#include #include + #include #include #include @@ -10,7 +12,6 @@ #include #include "fsl_debug_console.h" -#include #define K32W_LOG_MODULE_NAME chip #define EOL_CHARS "\r\n" /* End of Line Characters */ @@ -128,7 +129,7 @@ namespace Platform { /** * CHIP log output function. */ -void ENFORCE_FORMAT(3, 0) LogV(const char * module, uint8_t category, const char * msg, va_list v) +void LogV(const char * module, uint8_t category, const char * msg, va_list v) { (void) module; (void) category; diff --git a/src/platform/nxp/k32w/k32w1/Logging.cpp b/src/platform/nxp/k32w/k32w1/Logging.cpp index 5c68f4f11dab7a..8f8aedd66741f8 100644 --- a/src/platform/nxp/k32w/k32w1/Logging.cpp +++ b/src/platform/nxp/k32w/k32w1/Logging.cpp @@ -146,7 +146,7 @@ namespace Platform { /** * CHIP log output function. */ -void ENFORCE_FORMAT(3, 0) LogV(const char * module, uint8_t category, const char * msg, va_list v) +void LogV(const char * module, uint8_t category, const char * msg, va_list v) { (void) module; (void) category; diff --git a/src/platform/openiotsdk/Logging.cpp b/src/platform/openiotsdk/Logging.cpp index bc51e4e44d6570..fad4c8470221c0 100644 --- a/src/platform/openiotsdk/Logging.cpp +++ b/src/platform/openiotsdk/Logging.cpp @@ -22,11 +22,11 @@ * for Open IOT SDK platform. */ +#include + #include -#include #include #include -#include #include #include @@ -50,7 +50,7 @@ namespace Platform { /** * CHIP log output functions. 
*/ -void ENFORCE_FORMAT(3, 0) LogV(const char * module, uint8_t category, const char * msg, va_list v) +void LogV(const char * module, uint8_t category, const char * msg, va_list v) { char logMsgBuffer[CHIP_CONFIG_LOG_MESSAGE_MAX_SIZE]; diff --git a/src/platform/qpg/Logging.cpp b/src/platform/qpg/Logging.cpp index 8a30a63210f622..549b92b9df5908 100644 --- a/src/platform/qpg/Logging.cpp +++ b/src/platform/qpg/Logging.cpp @@ -5,7 +5,6 @@ #include #include -#include #include #include #include @@ -67,7 +66,7 @@ static size_t AddTimeStampAndPrefixStr(char * logBuffer, const char * prefix, si * CHIP log output function. */ -void ENFORCE_FORMAT(3, 0) LogV(const char * module, uint8_t category, const char * msg, va_list v) +void LogV(const char * module, uint8_t category, const char * msg, va_list v) { char formattedMsg[CHIP_CONFIG_LOG_MESSAGE_MAX_SIZE]; size_t formattedMsgLen; diff --git a/src/platform/webos/Logging.cpp b/src/platform/webos/Logging.cpp index 08929b94f88a65..6d4ceb91ea3579 100644 --- a/src/platform/webos/Logging.cpp +++ b/src/platform/webos/Logging.cpp @@ -1,7 +1,5 @@ /* See Project CHIP LICENSE file for licensing information. */ -#include -#include #include #include @@ -10,6 +8,8 @@ #include #include +#include + #ifdef USE_SYSLOG #include #endif @@ -33,7 +33,7 @@ namespace Platform { /** * CHIP log output functions. 
*/ -void ENFORCE_FORMAT(3, 0) LogV(const char * module, uint8_t category, const char * msg, va_list v) +void LogV(const char * module, uint8_t category, const char * msg, va_list v) { struct timeval tv; diff --git a/src/python_testing/TC_DA_1_2.py b/src/python_testing/TC_DA_1_2.py index 759fa3eae3e2a7..08fb375139088b 100644 --- a/src/python_testing/TC_DA_1_2.py +++ b/src/python_testing/TC_DA_1_2.py @@ -20,6 +20,7 @@ import re import chip.clusters as Clusters +from basic_composition_support import BasicCompositionTests from chip.interaction_model import InteractionModelError, Status from chip.tlv import TLVReader from cryptography import x509 @@ -104,7 +105,7 @@ def parse_ids_from_certs(dac: x509.Certificate, pai: x509.Certificate) -> tuple( # default is 'credentials/development/cd-certs'. -class TC_DA_1_2(MatterBaseTest): +class TC_DA_1_2(MatterBaseTest, BasicCompositionTests): def desc_TC_DA_1_2(self): return "Device Attestation Request Validation [DUT - Commissionee]" @@ -164,6 +165,11 @@ def steps_TC_DA_1_2(self): async def test_TC_DA_1_2(self): is_ci = self.check_pics('PICS_SDK_CI_ONLY') cd_cert_dir = self.user_params.get("cd_cert_dir", 'credentials/development/cd-certs') + post_cert_test = self.user_params.get("post_cert_test", False) + + do_test_over_pase = self.user_params.get("use_pase_only", False) + if do_test_over_pase: + self.connect_over_pase(self.default_controller) # Commissioning - done self.step(0) @@ -308,7 +314,9 @@ async def test_TC_DA_1_2(self): self.step("6.8") asserts.assert_in(version_number, range(0, 65535), "Version number out of range") self.step("6.9") - if is_ci: + if post_cert_test: + asserts.assert_equal(certification_type, 2, "Certification declaration is not marked as production.") + elif is_ci: asserts.assert_in(certification_type, [0, 1, 2], "Certification type is out of range") else: asserts.assert_in(certification_type, [1, 2], "Certification type is out of range") @@ -392,7 +400,7 @@ async def test_TC_DA_1_2(self): 
self.mark_current_step_skipped() self.step(12) - proxy = self.default_controller.GetConnectedDeviceSync(self.dut_node_id, False) + proxy = self.default_controller.GetConnectedDeviceSync(self.dut_node_id, do_test_over_pase) asserts.assert_equal(len(proxy.attestationChallenge), 16, "Attestation challenge is the wrong length") attestation_tbs = elements + proxy.attestationChallenge diff --git a/src/python_testing/TC_SC_3_6.py b/src/python_testing/TC_SC_3_6.py index ec09d4bb8e815e..9247cb546e19c6 100644 --- a/src/python_testing/TC_SC_3_6.py +++ b/src/python_testing/TC_SC_3_6.py @@ -15,6 +15,14 @@ # limitations under the License. # +# test-runner-runs: run1 +# test-runner-run/run1/app: ${ALL_CLUSTERS_APP} +# test-runner-run/run1/factoryreset: True +# test-runner-run/run1/quiet: True +# test-runner-run/run1/app-args: --discriminator 1234 --KVS kvs1 --trace-to json:${TRACE_APP}.json +# test-runner-run/run1/script-args: --storage-path admin_storage.json --commissioning-method on-network --discriminator 1234 --passcode 20202021 --trace-to json:${TRACE_TEST_JSON}.json --trace-to perfetto:${TRACE_TEST_PERFETTO}.perfetto + + import asyncio import logging import queue diff --git a/src/python_testing/basic_composition_support.py b/src/python_testing/basic_composition_support.py index 299b7da194b1bb..8cc958a7207b3d 100644 --- a/src/python_testing/basic_composition_support.py +++ b/src/python_testing/basic_composition_support.py @@ -98,6 +98,11 @@ def ConvertValue(value) -> Any: class BasicCompositionTests: + def connect_over_pase(self, dev_ctrl): + setupCode = self.matter_test_config.qr_code_content if self.matter_test_config.qr_code_content is not None else self.matter_test_config.manual_code + asserts.assert_true(setupCode, "Require either --qr-code or --manual-code.") + dev_ctrl.FindOrEstablishPASESession(setupCode, self.dut_node_id) + def dump_wildcard(self, dump_device_composition_path: typing.Optional[str]): node_dump_dict = {endpoint_id: 
MatterTlvToJson(self.endpoints_tlv[endpoint_id]) for endpoint_id in self.endpoints_tlv} logging.debug(f"Raw TLV contents of Node: {json.dumps(node_dump_dict, indent=2)}") @@ -116,10 +121,8 @@ async def setup_class_helper(self, default_to_pase: bool = True): dump_device_composition_path: Optional[str] = self.user_params.get("dump_device_composition_path", None) if do_test_over_pase: - setupCode = self.matter_test_config.qr_code_content if self.matter_test_config.qr_code_content is not None else self.matter_test_config.manual_code - asserts.assert_true(setupCode, "Require either --qr-code or --manual-code.") + self.connect_over_pase(dev_ctrl) node_id = self.dut_node_id - dev_ctrl.EstablishPASESession(setupCode, node_id) else: # Using the already commissioned node node_id = self.dut_node_id diff --git a/src/python_testing/matter_testing_support.py b/src/python_testing/matter_testing_support.py index e0acefd1c61782..9fad2a0d316178 100644 --- a/src/python_testing/matter_testing_support.py +++ b/src/python_testing/matter_testing_support.py @@ -1670,7 +1670,7 @@ def run_tests_no_exit(test_class: MatterBaseTest, matter_test_config: MatterTest if hooks: # Right now, we only support running a single test class at once, - # but it's relatively easy to exapand that to make the test process faster + # but it's relatively easy to expand that to make the test process faster # TODO: support a list of tests hooks.start(count=1) # Mobly gives the test run time in seconds, lets be a bit more precise diff --git a/src/python_testing/post_certification_tests/production_device_checks.py b/src/python_testing/post_certification_tests/production_device_checks.py new file mode 100644 index 00000000000000..0e8fd617c44110 --- /dev/null +++ b/src/python_testing/post_certification_tests/production_device_checks.py @@ -0,0 +1,477 @@ +# +# Copyright (c) 2024 Project CHIP Authors +# All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# This test is used to evaluate that all the proper post-certification +# work has been done to make a Matter device production ready. +# This test ensure that: +# - DAC chain is valid and spec compliant, and chains up to a PAA that +# is registered in the main net DCL +# - CD is valid and, signed by one of the known CSA signing certs and +# is marked as a production CD +# - DCL entries for this device and vendor have all been registered +# - DCL OTA entries have proper sizes and checksums +# - TestEventTriggers have been turned off +# +# This test is performed over PASE on a factory reset device. +# +# To run this test, first build and install the python chip wheel +# files, then add the extra dependencies. From the root: +# +# . 
scripts/activate.sh +# ./scripts/build_python.sh -i py +# source py/bin/activate +# pip install opencv-python requests click_option_group +# python src/python_testing/post_certification_tests/production_device_checks.py + +import base64 +import hashlib +import importlib +import logging +import os +import shutil +import sys +import time +import typing +import uuid +from dataclasses import dataclass +from enum import Enum, auto +from pathlib import Path + +import chip.clusters as Clusters +import cv2 +import requests +from mobly import asserts + +DEFAULT_CHIP_ROOT = os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', '..', '..')) + +try: + from basic_composition_support import BasicCompositionTests + from matter_testing_support import (MatterBaseTest, MatterStackState, MatterTestConfig, TestStep, async_test_body, + run_tests_no_exit) +except ImportError: + sys.path.append(os.path.abspath( + os.path.join(os.path.dirname(__file__), '..'))) + from basic_composition_support import BasicCompositionTests + from matter_testing_support import (MatterBaseTest, MatterStackState, MatterTestConfig, TestStep, async_test_body, + run_tests_no_exit) + +try: + import fetch_paa_certs_from_dcl +except ImportError: + sys.path.append(os.path.abspath( + os.path.join(DEFAULT_CHIP_ROOT, 'credentials'))) + import fetch_paa_certs_from_dcl + + +@dataclass +class Failure: + step: str + exception: typing.Optional[Exception] + + +class Hooks(): + def __init__(self): + self.failures = {} + self.current_step = 'unknown' + self.current_test = 'unknown' + + def start(self, count: int): + pass + + def stop(self, duration: int): + pass + + def test_start(self, filename: str, name: str, count: int): + self.current_test = name + pass + + def test_stop(self, exception: Exception, duration: int): + # Exception is the test assertion that caused the failure + if exception: + self.failures[self.current_test].exception = exception + + def step_skipped(self, name: str, expression: str): + pass + + 
def step_start(self, name: str): + self.current_step = name + + def step_success(self, logger, logs, duration: int, request): + pass + + def step_failure(self, logger, logs, duration: int, request, received): + self.failures[self.current_test] = Failure(self.current_step, None) + + def step_unknown(self): + pass + + def get_failures(self) -> list[str]: + return self.failures + + +class TestEventTriggersCheck(MatterBaseTest, BasicCompositionTests): + @async_test_body + async def test_TestEventTriggersCheck(self): + self.connect_over_pase(self.default_controller) + gd = Clusters.GeneralDiagnostics + ret = await self.read_single_attribute_check_success(cluster=gd, attribute=gd.Attributes.TestEventTriggersEnabled) + asserts.assert_equal(ret, 0, "TestEventTriggers are still on") + + +class DclCheck(MatterBaseTest, BasicCompositionTests): + @async_test_body + async def setup_class(self): + self.connect_over_pase(self.default_controller) + bi = Clusters.BasicInformation + self.vid = await self.read_single_attribute_check_success(cluster=bi, attribute=bi.Attributes.VendorID) + self.pid = await self.read_single_attribute_check_success(cluster=bi, attribute=bi.Attributes.ProductID) + self.software_version = await self.read_single_attribute_check_success(cluster=bi, attribute=bi.Attributes.SoftwareVersion) + self.url = fetch_paa_certs_from_dcl.PRODUCTION_NODE_URL_REST + + self.vid_str = f'vid = 0x{self.vid:04X}' + self.vid_pid_str = f'{self.vid_str} pid = 0x{self.pid:04X}' + self.vid_pid_sv_str = f'{self.vid_pid_str} software version = {self.software_version}' + + def steps_Vendor(self): + return [TestStep(1, "Check if device VID is listed in the DCL vendor schema", "Listing found")] + + def test_Vendor(self): + self.step(1) + entry = requests.get(f"{self.url}/dcl/vendorinfo/vendors/{self.vid}").json() + key = 'vendorInfo' + asserts.assert_true(key in entry.keys(), f"Unable to find vendor entry for {self.vid_str}") + logging.info(f'Found vendor key for {self.vid_str} in the 
DCL:') + logging.info(f'{entry[key]}') + + def steps_Model(self): + return [TestStep(1, "Check if device VID/PID are listed in the DCL model schema", "Listing found")] + + def test_Model(self): + self.step(1) + key = 'model' + entry = requests.get(f"{self.url}/dcl/model/models/{self.vid}/{self.pid}").json() + asserts.assert_true(key in entry.keys(), f"Unable to find model entry for {self.vid_pid_str}") + logging.info(f'Found model entry for {self.vid_pid_str} in the DCL:') + logging.info(f'{entry[key]}') + + def steps_Compliance(self): + return [TestStep(1, "Check if device VID/PID/SoftwareVersion are listed in the DCL compliance info schema", "Listing found")] + + def test_Compliance(self): + self.step(1) + key = 'complianceInfo' + entry = requests.get( + f"{self.url}/dcl/compliance/compliance-info/{self.vid}/{self.pid}/{self.software_version}/matter").json() + asserts.assert_true(key in entry.keys(), + f"Unable to find compliance entry for {self.vid_pid_sv_str}") + logging.info( + f'Found compliance info for {self.vid_pid_sv_str} in the DCL:') + logging.info(f'{entry[key]}') + + def steps_CertifiedModel(self): + return [TestStep(1, "Check if device VID/PID/SoftwareVersion are listed in the DCL certified model schema", "Listing found")] + + def test_CertifiedModel(self): + self.step(1) + key = 'certifiedModel' + entry = requests.get( + f"{self.url}/dcl/compliance/certified-models/{self.vid}/{self.pid}/{self.software_version}/matter").json() + asserts.assert_true(key in entry.keys(), + f"Unable to find certified model entry for {self.vid_pid_sv_str}") + logging.info( + f'Found certified model for {self.vid_pid_sv_str} in the DCL:') + logging.info(f'{entry[key]}') + + def steps_AllSoftwareVersions(self): + return [TestStep(1, "Query the version information for this software version", "DCL entry exists"), + TestStep(2, "For each valid software version with an OtaUrl, verify the OtaChecksumType is in the valid range and the OtaChecksum is a base64. 
If the softwareVersion matches the current softwareVersion on the device, ensure the entry is valid.", "OtaChecksum is base64 and OtaChecksumType is in the valid set")] + + def test_AllSoftwareVersions(self): + self.step(1) + versions_entry = requests.get(f"{self.url}/dcl/model/versions/{self.vid}/{self.pid}").json() + key_model_versions = 'modelVersions' + asserts.assert_true(key_model_versions in versions_entry.keys(), + f"Unable to find {key_model_versions} in software versions schema for {self.vid_pid_str}") + logging.info(f'Found version info for vid=0x{self.vid_pid_str} in the DCL:') + logging.info(f'{versions_entry[key_model_versions]}') + key_software_versions = 'softwareVersions' + asserts.assert_true(key_software_versions in versions_entry[key_model_versions].keys( + ), f"Unable to find {key_software_versions} in software versions schema for {self.vid_pid_str}") + + problems = [] + self.step(2) + for software_version in versions_entry[key_model_versions][key_software_versions]: + entry_wrapper = requests.get(f"{self.url}/dcl/model/versions/{self.vid}/{self.pid}/{software_version}").json() + key_model_version = 'modelVersion' + if key_model_version not in entry_wrapper: + problems.append( + f'Missing key {key_model_version} in entry for {self.vid_pid_str} software version={software_version}') + continue + logging.info(f'Found entry version entry for {self.vid_pid_str} software version={software_version}') + logging.info(entry_wrapper) + entry = entry_wrapper[key_model_version] + key_ota_url = 'otaUrl' + key_software_version_valid = 'softwareVersionValid' + key_ota_checksum = 'otaChecksum' + key_ota_checksum_type = 'otaChecksumType' + key_ota_file_size = 'otaFileSize' + + def check_key(key): + if key not in entry.keys(): + problems.append( + f'Missing key {key} in DCL versions entry for {self.vid_pid_str} software version={software_version}') + check_key(key_ota_url) + check_key(key_software_version_valid) + if entry[key_software_version_valid] and 
entry[key_ota_url]: + check_key(key_ota_checksum) + check_key(key_ota_checksum_type) + checksum_types = {1: hashlib.sha256, 7: hashlib.sha384, 8: hashlib.sha256, + 10: hashlib.sha3_256, 11: hashlib.sha3_384, 12: hashlib.sha3_512} + if entry[key_ota_checksum_type] not in checksum_types.keys(): + problems.append( + f'OtaChecksumType for entry {self.vid_pid_str} software version={software_version} is invalid. Found {entry[key_ota_checksum_type]} valid values: {checksum_types.keys()}') + continue + checksum = entry[key_ota_checksum] + try: + is_base64 = base64.b64encode(base64.b64decode(checksum)).decode('utf-8') == checksum + except (ValueError, TypeError): + is_base64 = False + if not is_base64: + problems.append( + f"Checksum {checksum} is not base64 encoded for for entry {self.vid_pid_str} software version={software_version}") + continue + + response = requests.get(entry[key_ota_url]) + if not response.ok: + problems.append( + f"Unable to get OTA object from {entry[key_ota_url]} for {self.vid_pid_str} software version = {software_version}") + continue + + ota_len = str(len(response.content)) + dcl_len = entry[key_ota_file_size] + if ota_len != dcl_len: + problems.append( + f'Incorrect OTA size for {self.vid_pid_str} software_version = {software_version}, received size: {len(response.content)} DCL states {entry[key_ota_file_size]}') + continue + + checksum = checksum_types[entry[key_ota_checksum_type]](response.content).digest() + dcl_checksum = base64.b64decode(entry[key_ota_checksum]) + if checksum != dcl_checksum: + problems.append( + f'Incorrect checksum for {self.vid_pid_str} software version = {software_version}, calculated: {checksum}, DCL: {dcl_checksum}') + + msg = 'Problems found in software version DCL checks:\n' + for problem in problems: + msg += f'{problem}\n' + asserts.assert_false(problems, msg) + + +def get_qr() -> str: + qr_code_detector = cv2.QRCodeDetector() + camera_id = 0 + video_capture = cv2.VideoCapture(camera_id) + window_name = 
'Post-certification check QR code reader' + qr = '' + while not qr: + ret, frame = video_capture.read() + if ret: + ret_qr, decoded_info, points, _ = qr_code_detector.detectAndDecodeMulti( + frame) + if ret_qr: + for s, p in zip(decoded_info, points): + if s and s.startswith("MT:"): + qr = s + color = (0, 255, 0) + else: + color = (0, 0, 255) + frame = cv2.polylines( + frame, [p.astype(int)], True, color, 8) + cv2.imshow(window_name, frame) + + if (cv2.waitKey(1) & 0xFF == ord('q')): + break + if qr: + time.sleep(1) + break + + cv2.destroyWindow(window_name) + return qr + + +class SetupCodeType(Enum): + UNKNOWN = auto() + QR = auto() + MANUAL = auto() + + +def get_setup_code() -> (str, SetupCodeType): + ''' Returns the setup code and an enum indicating the code type.''' + while True: + print('Press q for qr code or m for manual code') + pref = input() + if pref in ['q', 'Q']: + return (get_qr(), SetupCodeType.QR) + elif pref in ['m', 'M']: + print('please enter manual code') + m = input() + m = ''.join([i for i in m if i.isnumeric()]) + if len(m) == 11 or len(m) == 21: + return (m, SetupCodeType.MANUAL) + else: + print("Invalid manual code - please try again") + + +class TestConfig(object): + def __init__(self, code: str, code_type: SetupCodeType): + tmp_uuid = str(uuid.uuid4()) + tmpdir_paa = f'paas_{tmp_uuid}' + tmpdir_cd = f'cd_{tmp_uuid}' + self.paa_path = os.path.join('.', tmpdir_paa) + self.cd_path = os.path.join('.', tmpdir_cd) + os.mkdir(self.paa_path) + os.mkdir(self.cd_path) + fetch_paa_certs_from_dcl.fetch_paa_certs(use_main_net_dcld='', use_test_net_dcld='', + use_main_net_http=True, use_test_net_http=False, paa_trust_store_path=tmpdir_paa) + fetch_paa_certs_from_dcl.fetch_cd_signing_certs(tmpdir_cd) + self.admin_storage = f'admin_storage_{tmp_uuid}.json' + global_test_params = {'use_pase_only': True, 'post_cert_test': True} + self.config = MatterTestConfig(endpoint=0, dut_node_ids=[ + 1], global_test_params=global_test_params, 
storage_path=self.admin_storage) + if code_type == SetupCodeType.QR: + self.config.qr_code_content = code + else: + self.config.manual_code = code + self.config.paa_trust_store_path = Path(self.paa_path) + # Set for DA-1.2, which uses the CD signing certs for verification. This test is now set to use the production CD signing certs from the DCL. + self.config.global_test_params['cd_cert_dir'] = tmpdir_cd + self.stack = MatterStackState(self.config) + self.default_controller = self.stack.certificate_authorities[0].adminList[0].NewController( + nodeId=112233, + paaTrustStorePath=str(self.config.paa_trust_store_path) + ) + + def get_stack(self): + return self.stack + + def get_controller(self): + return self.default_controller + + def get_config(self, tests: list[str]): + self.config.tests = tests + return self.config + + def __enter__(self): + return self + + def __exit__(self, *args): + self.default_controller.Shutdown() + self.stack.Shutdown() + os.remove(self.admin_storage) + shutil.rmtree(self.paa_path) + shutil.rmtree(self.cd_path) + + +def run_test(test_class: MatterBaseTest, tests: typing.List[str], test_config: TestConfig) -> list[str]: + hooks = Hooks() + stack = test_config.get_stack() + controller = test_config.get_controller() + matter_config = test_config.get_config(tests) + ok = run_tests_no_exit(test_class, matter_config, hooks, controller, stack) + if not ok: + print(f"Test failure. 
Failed on step: {hooks.get_failures()}") + return hooks.get_failures() + + +def run_cert_test(test: str, test_config: TestConfig) -> list[str]: + ''' Runs the specified test, returns a list of failures''' + # for simplicity and because I know the tests we're running follow this pattern, + # just assume the naming convention based off the base name - ie, file and class + # share a name, test is test_classname + module = importlib.import_module(test) + test_class = getattr(module, test) + return run_test(test_class, [f'test_{test}'], test_config) + + +def main(): + code, code_type = get_setup_code() + with TestConfig(code, code_type) as test_config: + # DA-1.2 is a test of the certification declaration + failures_DA_1_2 = run_cert_test('TC_DA_1_2', test_config) + # DA-1.7 is a test of the DAC chain (up to a PAA in the given directory) + failures_DA_1_7 = run_cert_test('TC_DA_1_7', test_config) + + failures_test_event_trigger = run_test(TestEventTriggersCheck, ['test_TestEventTriggersCheck'], test_config) + + # [] means all tests. 
+ failures_dcl = run_test(DclCheck, [], test_config) + + report = [] + for test, failure in failures_DA_1_2.items(): + # Check for known failures first + # step 6.9 - non-production CD + # 9 - not signed by CSA CA + # other steps - should have been caught in cert, but we should report none the less + if failure.step.startswith('6.9'): + report.append('Device is using a non-production certification declaration') + elif failure.step.startswith('9'): + report.append('Device is using a certification declaration that was not signed by the CSA CA') + else: + report.append(f'Device attestation failure: TC-DA-1.2: {failure.step}') + report.append(f'\t{str(failure.exception)}\n') + + for test, failure in failures_DA_1_7.items(): + # Notable failures in DA-1.7: + # 1.3 - PAI signature does not chain to a PAA in the main net DCL + if failure.step.startswith('1.3'): + report.append('Device DAC chain does not chain to a PAA in the main net DCL') + else: + report.append(f'Device attestation failure: TC-DA-1.7: {failure.step}') + report.append(f'\t{str(failure.exception)}\n') + + for test, failure in failures_test_event_trigger.items(): + # only one possible failure here + report.append('Device has test event triggers enabled in production') + report.append(f'\t{str(failure.exception)}\n') + + for test, failure in failures_dcl.items(): + if test == 'test_Vendor': + report.append('Device vendor ID is not present in the DCL') + elif test == 'test_Model': + report.append('Device model is not present in the DCL') + elif test == 'test_Compliance': + report.append('Device compliance information is not present in the DCL') + elif test == 'test_CertifiedModel': + report.append('Device certified model is not present in the DCL') + elif test == 'test_AllSoftwareVersions': + report.append('Problems with device software version in the DCL') + else: + report.append(f'unknown DCL failure in test {test}: {failure.step}') + report.append(f'\t{str(failure.exception)}\n') + + print('\n\n\n') + if 
report: + print('TEST FAILED:') + for s in report: + print(f'\t{s}') + return 1 + else: + print('TEST PASSED!') + return 0 + + +if __name__ == "__main__": + sys.exit(main())