diff --git a/scripts/idl/lint/__init__.py b/scripts/idl/lint/__init__.py new file mode 100644 index 00000000000000..503bdf4086f77e --- /dev/null +++ b/scripts/idl/lint/__init__.py @@ -0,0 +1,15 @@ +# Copyright (c) 2022 Project CHIP Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .lint_rules_parser import CreateParser diff --git a/scripts/idl/lint/lint_rules_grammar.lark b/scripts/idl/lint/lint_rules_grammar.lark new file mode 100644 index 00000000000000..73c3fe8f43bc66 --- /dev/null +++ b/scripts/idl/lint/lint_rules_grammar.lark @@ -0,0 +1,32 @@ +start: instruction* + +instruction: load_xml|all_endpoint_rule|specific_endpoint_rule + +load_xml: "load" ESCAPED_STRING ";" + +all_endpoint_rule: "all" "endpoints" "{" required_global_attribute* "}" + +specific_endpoint_rule: "endpoint" integer "{" required_server_cluster* "}" + +required_global_attribute: "require" "global" "attribute" id "=" integer ";" + +required_server_cluster: "require" "server" "cluster" id ";" + +integer: positive_integer | negative_integer + +positive_integer: POSITIVE_INTEGER | HEX_INTEGER +negative_integer: "-" positive_integer + +id: ID + +POSITIVE_INTEGER: /\d+/ +HEX_INTEGER: /0x[A-Fa-f0-9]+/ +ID: /[a-zA-Z_][a-zA-Z0-9_]*/ + +%import common.ESCAPED_STRING +%import common.WS +%import common.C_COMMENT +%import common.CPP_COMMENT +%ignore WS +%ignore C_COMMENT +%ignore CPP_COMMENT diff --git a/scripts/idl/lint/lint_rules_parser.py b/scripts/idl/lint/lint_rules_parser.py new file mode 100755 index 00000000000000..7e1d69cc200061 --- /dev/null +++ b/scripts/idl/lint/lint_rules_parser.py @@ -0,0 +1,269 @@ +#!/usr/bin/env python + +import logging +import os +import xml.etree.ElementTree + +from dataclasses import dataclass, field +from typing import List, Optional, Mapping +from lark import Lark +from lark.visitors import Transformer, v_args, Discard +import stringcase +import traceback + +try: + from .types import RequiredAttributesRule, AttributeRequirement, ClusterRequirement +except: + import sys + + sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "..")) + from idl.lint.types import RequiredAttributesRule, AttributeRequirement, ClusterRequirement + + +def parseNumberString(n): + if n.startswith('0x'): + return int(n[2:], 16) + else: + return int(n) + + +@dataclass +class RequiredAttribute: + name: str + code: int + + +@dataclass +class DecodedCluster: + name: str + code: int + required_attributes: List[RequiredAttribute] + + +def DecodeClusterFromXml(element: xml.etree.ElementTree.Element): + if element.tag != 'cluster': + logging.error("Not a cluster element: %r" % element) + return None + + # cluster elements contain among other children + # - name (general name for this cluster) + # - code (unique identifier, may be hex or numeric) + # - attribute with side, code and optional attributes + + try: + name = element.find('name').text.replace(' ', '') + required_attributes = [] + + for attr in element.findall('attribute'): + if attr.attrib['side'] != 'server': + continue + + if 
'optional' in attr.attrib and attr.attrib['optional'] == 'true': + continue + + required_attributes.append( + RequiredAttribute( + name=attr.text, + code=parseNumberString(attr.attrib['code']) + )) + + return DecodedCluster( + name=name, + code=parseNumberString(element.find('code').text), + required_attributes=required_attributes, + ) + except Exception as e: + logging.exception("Failed to decode cluster %r" % element) + return None + + +def ClustersInXmlFile(path: str): + logging.info("Loading XML from %s" % path) + + # root is expected to be just a "configurator" object + configurator = xml.etree.ElementTree.parse(path).getroot() + for child in configurator: + if child.tag != 'cluster': + continue + yield child + + +class LintRulesContext: + """Represents a context for loading lint rules. + + Handles: + - loading referenced files (matter xml definitions) + - adding linter rules as data is parsed + - looking up identifiers for various rules + """ + + def __init__(self): + self._linter_rule = RequiredAttributesRule("Rules file") + + # Map cluster names to the underlying code + self._cluster_codes: Mapping[str, int] = {} + + def GetLinterRules(self): + return [self._linter_rule] + + def RequireAttribute(self, r: AttributeRequirement): + self._linter_rule.RequireAttribute(r) + + def RequireClusterInEndpoint(self, name: str, code: int): + """Mark that a specific cluster is always required in the given endpoint + """ + if name not in self._cluster_codes: + logging.error("UNKNOWN cluster name %s" % name) + logging.error("Known names: %s" % (",".join(self._cluster_codes.keys()), )) + return + + self._linter_rule.RequireClusterInEndpoint(ClusterRequirement( + endpoint_id=code, + cluster_id=self._cluster_codes[name], + cluster_name=name, + )) + + def LoadXml(self, path: str): + """Load XML data from the given path and add it to + internal processing. Adds attribute requirement rules + as needed. + """ + for cluster in ClustersInXmlFile(path): + decoded = DecodeClusterFromXml(cluster) + + if not decoded: + continue + + self._cluster_codes[decoded.name] = decoded.code + + for attr in decoded.required_attributes: + self._linter_rule.RequireAttribute(AttributeRequirement( + code=attr.code, name=attr.name, filter_cluster=decoded.code)) + + # TODO: add cluster ID to internal registry + + +class LintRulesTransformer(Transformer): + """ + A transformer capable of transforming data parsed by Lark according to + lint_rules_grammar.lark. + """ + + def __init__(self, file_name: str): + self.context = LintRulesContext() + self.file_name = file_name + + def positive_integer(self, tokens): + """Numbers in the grammar are integers or hex numbers. 
+ """ + if len(tokens) != 1: + raise Exception("Unexpected argument count") + + return parseNumberString(tokens[0].value) + + @v_args(inline=True) + def negative_integer(self, value): + return -value + + @v_args(inline=True) + def integer(self, value): + return value + + def id(self, tokens): + """An id is a string containing an identifier + """ + if len(tokens) != 1: + raise Exception("Unexpected argument count") + return tokens[0].value + + def ESCAPED_STRING(self, s): + # handle escapes, skip the start and end quotes + return s.value[1:-1].encode('utf-8').decode('unicode-escape') + + def start(self, instructions): + # At this point processing is considered done, return all + # linter rules that were found + return self.context.GetLinterRules() + + def instruction(self, instruction): + return Discard + + def all_endpoint_rule(self, attributes): + for attribute in attributes: + self.context.RequireAttribute(attribute) + + return Discard + + @v_args(inline=True) + def load_xml(self, path): + if not os.path.isabs(path): + path = os.path.abspath(os.path.join(os.path.dirname(self.file_name), path)) + + self.context.LoadXml(path) + + @v_args(inline=True) + def required_global_attribute(self, name, code): + return AttributeRequirement(code=code, name=name) + + @v_args(inline=True) + def specific_endpoint_rule(self, code, *names): + for name in names: + self.context.RequireClusterInEndpoint(name, code) + return Discard + + @v_args(inline=True) + def required_server_cluster(self, id): + return id + + +class Parser: + def __init__(self, parser, file_name: str): + self.parser = parser + self.file_name = file_name + + def parse(self): + data = LintRulesTransformer(self.file_name).transform(self.parser.parse(open(self.file_name, "rt").read())) + return data + + +def CreateParser(file_name: str): + """ + Generates a parser that will process a lint rules file into a list of lint rules + """ + return Parser(Lark.open('lint_rules_grammar.lark', rel_to=__file__, parser='lalr', propagate_positions=True), file_name=file_name) + + +if __name__ == '__main__': + # This parser is generally not intended to be run as a stand-alone binary. + # Running it directly is only for debugging, to print out the parsed rules. + import click + import coloredlogs + + # Supported log levels, mapping string values required for argument + # parsing into logging constants + __LOG_LEVELS__ = { + 'debug': logging.DEBUG, + 'info': logging.INFO, + 'warn': logging.WARN, + 'fatal': logging.FATAL, + } + + @click.command() + @click.option( + '--log-level', + default='INFO', + type=click.Choice(__LOG_LEVELS__.keys(), case_sensitive=False), + help='Determines the verbosity of script output.') + @click.argument('filename') + def main(log_level, filename=None): + coloredlogs.install(level=__LOG_LEVELS__[ + log_level], fmt='%(asctime)s %(levelname)-7s %(message)s') + + logging.info("Starting to parse ...") + data = CreateParser(filename).parse() + logging.info("Parse completed") + + logging.info("Data:") + logging.info("%r" % data) + + main() diff --git a/scripts/idl/lint/types.py b/scripts/idl/lint/types.py new file mode 100644 index 00000000000000..5222a4a0bc083e --- /dev/null +++ b/scripts/idl/lint/types.py @@ -0,0 +1,184 @@ +# Copyright (c) 2022 Project CHIP Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from idl.matter_idl_types import Idl, ParseMetaData, ClusterSide +from abc import ABC, abstractmethod +from typing import List, Optional +from dataclasses import dataclass, field + + +@dataclass +class LocationInFile: + file_name: str + line: int + column: int + + def __init__(self, file_name: str, meta: ParseMetaData): + self.file_name = file_name + self.line = meta.line + self.column = meta.column + + +@dataclass +class LintError: + """Represents a lint error, potentially at a specific location in a file""" + + message: str + location: Optional[LocationInFile] = field(default=None) + + def __init__(self, text: str, location: Optional[LocationInFile] = None): + self.message = text + if location: + self.message += " at %s:%d:%d" % (location.file_name, location.line, location.column) + + def __str__(self): + return self.message + + +class LintRule(ABC): + """Validates a linter rule on an idl""" + + def __init__(self, name): + self.name = name + + @abstractmethod + def LintIdl(self, idl: Idl) -> List[LintError]: + """Runs the linter on the given IDL and returns back any errors it may find""" + pass + + +@dataclass +class AttributeRequirement: + """Contains information about a required attribute""" + code: int # required attributes are searched by codes + name: str # the name of this attribute. Expect it to be exposed properly + + # Optional filters to apply to specific locations + filter_cluster: Optional[int] = field(default=None) + + +@dataclass +class ClusterRequirement: + endpoint_id: int + cluster_id: int + cluster_name: str + + +class RequiredAttributesRule(LintRule): + def __init__(self, name): + super(RequiredAttributesRule, self).__init__(name) + self._lint_errors = [] + self._idl = None + + # Map attribute code to name + self._mandatory_attributes: List[AttributeRequirement] = [] + self._mandatory_clusters: List[ClusterRequirement] = [] + + def __repr__(self): + result = "RequiredAttributesRule{\n" + + if self._mandatory_attributes: + result += " mandatory_attributes:\n" + for attr in self._mandatory_attributes: + result += " - %r\n" % attr + + result += "}" + return result + + def RequireAttribute(self, attr: AttributeRequirement): + """Mark an attribute required""" + self._mandatory_attributes.append(attr) + + def RequireClusterInEndpoint(self, requirement: ClusterRequirement): + self._mandatory_clusters.append(requirement) + + def _ParseLocation(self, meta: Optional[ParseMetaData]) -> Optional[LocationInFile]: + """Create a location in the current file that is being parsed. """ + if not meta or not self._idl.parse_file_name: + return None + return LocationInFile(self._idl.parse_file_name, meta) + + def _AddLintError(self, text, location): + self._lint_errors.append(LintError("%s: %s" % (self.name, text), location)) + + def _ServerClusterDefinition(self, name: str, location: Optional[LocationInFile]): + """Finds the server cluster definition with the given name. 
+ + On error returns None and _lint_errors is updated internally + """ + cluster_definition = [ + c for c in self._idl.clusters if c.name == name and c.side == ClusterSide.SERVER + ] + if not cluster_definition: + self._AddLintError("Cluster definition for %s not found" % name, location) + return None + + if len(cluster_definition) > 1: + self._AddLintError("Multiple cluster definitions found for %s" % name, location) + return None + + return cluster_definition[0] + + def _LintImpl(self): + for endpoint in self._idl.endpoints: + + cluster_codes = set() + + for cluster in endpoint.server_clusters: + cluster_definition = self._ServerClusterDefinition(cluster.name, self._ParseLocation(cluster.parse_meta)) + if not cluster_definition: + continue + + cluster_codes.add(cluster_definition.code) + + # Cluster contains enabled attributes by name + # cluster_definition contains the definition of the attributes themselves + # + # Join the two to receive attribute codes + name_to_code_map = {} + for attr in cluster_definition.attributes: + name_to_code_map[attr.definition.name] = attr.definition.code + + attribute_codes = set() + # For all the instantiated attributes, figure out their code + for attr in cluster.attributes: + if attr.name not in name_to_code_map: + self._AddLintError("Could not find attribute definition (no code) for %s:%s" % + (cluster.name, attr.name), self._ParseLocation(cluster.parse_meta)) + continue + + attribute_codes.add(name_to_code_map[attr.name]) + + # Linting codes now + for check in self._mandatory_attributes: + if check.filter_cluster is not None and check.filter_cluster != cluster_definition.code: + continue + + if check.code not in attribute_codes: + self._AddLintError("EP%d:%s does not expose %s(%d) attribute" % + (endpoint.number, cluster.name, check.name, check.code), self._ParseLocation(cluster.parse_meta)) + + for requirement in self._mandatory_clusters: + if requirement.endpoint_id != endpoint.number: + continue + + if requirement.cluster_id not in cluster_codes: + self._AddLintError("Endpoint %d does not expose cluster %s (%d)" % + (requirement.endpoint_id, requirement.cluster_name, requirement.cluster_id), location=None) + + def LintIdl(self, idl: Idl) -> List[LintError]: + self._idl = idl + self._lint_errors = [] + self._LintImpl() + return self._lint_errors diff --git a/scripts/idl/matter_idl_parser.py b/scripts/idl/matter_idl_parser.py index 3531c94495de2c..37b001e5cca82d 100755 --- a/scripts/idl/matter_idl_parser.py +++ b/scripts/idl/matter_idl_parser.py @@ -62,9 +62,11 @@ class MatterIdlTransformer(Transformer): Actual parametes to the methods depend on the rules multiplicity and/or optionality. - """ + def __init__(self, skip_meta): + self.skip_meta = skip_meta + def positive_integer(self, tokens): """Numbers in the grammar are integers or hex numbers. 
""" @@ -268,9 +270,10 @@ def persist_attribute(self, _): def callback_attribute(self, _): return AttributeStorage.CALLBACK - @v_args(inline=True) - def endpoint_attribute_instantiation(self, storage, id, default=None): - return AttributeInstantiation(name=id, storage=storage, default=default) + @v_args(meta=True, inline=True) + def endpoint_attribute_instantiation(self, meta, storage, id, default=None): + meta = None if self.skip_meta else ParseMetaData(meta) + return AttributeInstantiation(parse_meta=meta, name=id, storage=storage, default=default) def ESCAPED_STRING(self, s): # handle escapes, skip the start and end quotes @@ -315,13 +318,16 @@ def endpoint(self, number, *transforms): def endpoint_cluster_binding(self, id): return AddBindingToEndpointTransform(id) - @v_args(inline=True) - def endpoint_server_cluster(self, id, *attributes): - return AddServerClusterToEndpointTransform(ServerClusterInstantiation(name=id, attributes=list(attributes))) + @v_args(meta=True, inline=True) + def endpoint_server_cluster(self, meta, id, *attributes): + meta = None if self.skip_meta else ParseMetaData(meta) + return AddServerClusterToEndpointTransform(ServerClusterInstantiation(parse_meta=meta, name=id, attributes=list(attributes))) - @v_args(inline=True) - def cluster(self, side, name, code, *content): - result = Cluster(side=side, name=name, code=code) + @v_args(inline=True, meta=True) + def cluster(self, meta, side, name, code, *content): + meta = None if self.skip_meta else ParseMetaData(meta) + + result = Cluster(parse_meta=meta, side=side, name=name, code=code) for item in content: if type(item) == Enum: @@ -359,11 +365,22 @@ def idl(self, items): return idl -def CreateParser(): +class ParserWithLines: + def __init__(self, parser, skip_meta: bool): + self.parser = parser + self.skip_meta = skip_meta + + def parse(self, file, file_name: str = None): + idl = MatterIdlTransformer(self.skip_meta).transform(self.parser.parse(file)) + idl.parse_file_name = file_name + return idl + + +def CreateParser(skip_meta: bool = False): """ Generates a parser that will process a ".matter" file into a IDL """ - return Lark.open('matter_grammar.lark', rel_to=__file__, start='idl', parser='lalr', transformer=MatterIdlTransformer()) + return ParserWithLines(Lark.open('matter_grammar.lark', rel_to=__file__, start='idl', parser='lalr', propagate_positions=True), skip_meta) if __name__ == '__main__': @@ -394,7 +411,7 @@ def main(log_level, filename=None): log_level], fmt='%(asctime)s %(levelname)-7s %(message)s') logging.info("Starting to parse ...") - data = CreateParser().parse(open(filename).read()) + data = CreateParser().parse(open(filename).read(), file_name=filename) logging.info("Parse completed") logging.info("Data:") diff --git a/scripts/idl/matter_idl_types.py b/scripts/idl/matter_idl_types.py index f56bd5e225f1e8..cac66d502a545b 100644 --- a/scripts/idl/matter_idl_types.py +++ b/scripts/idl/matter_idl_types.py @@ -1,9 +1,26 @@ import enum +from lark.tree import Meta from dataclasses import dataclass, field from typing import List, Set, Optional, Union +# Information about parsing location for specific items +# Helpful when referencing data items in logs when processing +@dataclass +class ParseMetaData: + line: int + column: int + + def __init__(self, meta: Meta = None, line: int = None, column: int = None): + if meta: + self.line = meta.line + self.column = meta.column + else: + self.line = line + self.column = column + + class FieldAttribute(enum.Enum): OPTIONAL = enum.auto() NULLABLE = enum.auto() @@ 
-167,6 +184,9 @@ class Cluster: structs: List[Struct] = field(default_factory=list) commands: List[Command] = field(default_factory=list) + # Parsing meta data missing only when skip meta data is requested + parse_meta: Optional[ParseMetaData] = field(default=None) + @dataclass class AttributeInstantiation: @@ -174,12 +194,18 @@ class AttributeInstantiation: storage: AttributeStorage default: Optional[Union[str, int, bool]] = None + # Parsing meta data missing only when skip meta data is requested + parse_meta: Optional[ParseMetaData] = field(default=None) + @dataclass class ServerClusterInstantiation: name: str attributes: List[AttributeInstantiation] = field(default_factory=list) + # Parsing meta data missing only when skip meta data is requested + parse_meta: Optional[ParseMetaData] = field(default=None) + @dataclass class Endpoint: @@ -195,3 +221,6 @@ class Idl: structs: List[Struct] = field(default_factory=list) clusters: List[Cluster] = field(default_factory=list) endpoints: List[Endpoint] = field(default_factory=list) + + # IDL file name is available only if parsing provides a file name + parse_file_name: Optional[str] = field(default=None) diff --git a/scripts/idl/test_matter_idl_parser.py b/scripts/idl/test_matter_idl_parser.py index 273da932ae8c4f..63db44bde458bd 100755 --- a/scripts/idl/test_matter_idl_parser.py +++ b/scripts/idl/test_matter_idl_parser.py @@ -29,7 +29,7 @@ def parseText(txt): - return CreateParser().parse(txt) + return CreateParser(skip_meta=True).parse(txt) class TestParser(unittest.TestCase): @@ -342,6 +342,20 @@ def test_cluster_event_acl(self): ])]) self.assertEqual(actual, expected) + def test_parsing_metadata_for_cluster(self): + actual = CreateParser(skip_meta=False).parse(""" +server cluster A = 1 { /* Test comment */ } + +// some empty lines and then indented + client cluster B = 2 { } + """) + + expected = Idl(clusters=[ + Cluster(parse_meta=ParseMetaData(line=2, column=1), side=ClusterSide.SERVER, name="A", code=1), + Cluster(parse_meta=ParseMetaData(line=5, column=4), side=ClusterSide.CLIENT, name="B", code=2), + ]) + self.assertEqual(actual, expected) + def test_multiple_clusters(self): actual = parseText(""" server cluster A = 1 { /* Test comment */ } diff --git a/scripts/idl_lint.py b/scripts/idl_lint.py new file mode 100755 index 00000000000000..01b8413d8c9fd6 --- /dev/null +++ b/scripts/idl_lint.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python +# Copyright (c) 2022 Project CHIP Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import click +import coloredlogs +import enum +import logging +import os +import sys + +from typing import List, Optional + +try: + from idl import matter_idl_parser +except: + sys.path.append(os.path.abspath(os.path.dirname(__file__))) + from idl import matter_idl_parser + +import idl.lint + +# Supported log levels, mapping string values required for argument +# parsing into logging constants +__LOG_LEVELS__ = { + "debug": logging.DEBUG, + "info": logging.INFO, + "warn": logging.WARN, + "fatal": logging.FATAL, +} + + +@click.command() +@click.option( + "--log-level", + default="INFO", + type=click.Choice(__LOG_LEVELS__.keys(), case_sensitive=False), + help="Determines the verbosity of script output", +) +@click.option( + "--rules", + default=os.path.abspath(os.path.join(os.path.dirname(__file__), "rules.matterlint")), + type=click.Path(exists=True), + help="Rules file to use", +) +@click.argument("idl_path", type=click.Path(exists=True)) +def main(log_level, rules, idl_path): + """ + Lints MATTER IDL files (.matter) using given RULES + """ + coloredlogs.install( + level=__LOG_LEVELS__[log_level], fmt="%(asctime)s %(levelname)-7s %(message)s" + ) + + lint_rules = [] + logging.info("Loading rules from %s" % rules) + lint_rules.extend(idl.lint.CreateParser(rules).parse()) + + logging.info("Parsing idl from %s" % idl_path) + idl_tree = matter_idl_parser.CreateParser().parse(open(idl_path, "rt").read(), file_name=idl_path) + + logging.info("Running %d lint rules" % len(lint_rules)) + + errors = [] + for rule in lint_rules: + logging.info(" Running %s" % rule.name) + errors.extend(rule.LintIdl(idl_tree)) + logging.info("Done") + + for e in errors: + logging.error("ERROR: %s" % e) + + if errors: + logging.error("Found %d lint errors" % len(errors)) + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/scripts/rules.matterlint b/scripts/rules.matterlint new file mode 100644 index 00000000000000..b2fa3245788f1e --- /dev/null +++ b/scripts/rules.matterlint @@ -0,0 +1,87 @@ +// These are global .matter idl linter rules +// First load data from XML definitions + +load "../src/app/zap-templates/zcl/data-model/chip/access-control-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/access-control-definitions.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/account-login-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/administrator-commissioning-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/application-basic-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/application-launcher-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/audio-output-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/basic-information-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/binding-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/boolean-state-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/bridged-actions-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/bridged-device-basic.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/channel-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/chip-ota.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/chip-types.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/clusters-extensions.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/content-launch-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/descriptor-cluster.xml"; +load 
"../src/app/zap-templates/zcl/data-model/chip/diagnostic-logs-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/door-lock-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/ethernet-network-diagnostics-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/fixed-label-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/flow-measurement-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/general-commissioning-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/general-diagnostics-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/global-attributes.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/group-key-mgmt-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/identify-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/illuminance-measurement-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/keypad-input-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/localization-configuration-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/low-power-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/matter-devices.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/media-input-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/media-playback-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/mode-select-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/network-commissioning-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/onoff-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/operational-credentials-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/power-source-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/power-source-configuration-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/pressure-measurement-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/proxy-configuration-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/proxy-discovery-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/proxy-valid-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/pump-configuration-and-control-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/pwm-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/relative-humidity-measurement-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/scene.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/software-diagnostics-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/switch-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/target-navigator-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/temperature-measurement-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/test-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/thermostat-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/thread-network-diagnostics-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/time-format-localization-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/time-synchronization-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/unit-localization-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/user-label-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/wake-on-lan-cluster.xml"; +load 
"../src/app/zap-templates/zcl/data-model/chip/wifi-network-diagnostics-cluster.xml"; +load "../src/app/zap-templates/zcl/data-model/chip/window-covering.xml"; + +all endpoints { + // These attributes follow a different code path and do not have to be + // present in the .matter file + // + // require global attribute generatedCommandList = 65528; + // require global attribute acceptedCommandList = 65529; + // require global attribute attributeList = 65531; + + require global attribute featureMap = 65532; + require global attribute clusterRevision = 65533; +} + +endpoint 0 { + // Identifiers for clusters are loaded from XML files + require server cluster Basic; + require server cluster GroupKeyManagement; + require server cluster NetworkCommissioning; + require server cluster GeneralCommissioning; + require server cluster OTASoftwareUpdateRequestor; +}