From 804373327c34b879e7f0b848e10ced58117d29df Mon Sep 17 00:00:00 2001
From: Vivien Nicolas
Date: Tue, 29 Aug 2023 20:33:34 +0200
Subject: [PATCH] Hack examples/chip-tool/py_matter_chip_tool_adapter/matter_chip_tool_adapter/encoder.py so it does use the right argument names for darwin-framework-tool

---
 .../matter_chip_tool_adapter/decoder.py | 16 ++++++++++++++++
 .../matter_chip_tool_adapter/encoder.py | 21 +++++++++++++++++--
 2 files changed, 35 insertions(+), 2 deletions(-)

diff --git a/examples/chip-tool/py_matter_chip_tool_adapter/matter_chip_tool_adapter/decoder.py b/examples/chip-tool/py_matter_chip_tool_adapter/matter_chip_tool_adapter/decoder.py
index ec9b4702de0a3d..dc721b24268447 100644
--- a/examples/chip-tool/py_matter_chip_tool_adapter/matter_chip_tool_adapter/decoder.py
+++ b/examples/chip-tool/py_matter_chip_tool_adapter/matter_chip_tool_adapter/decoder.py
@@ -303,6 +303,22 @@ def run(self, specs, value, cluster_name: str, typename: str, array: bool):
                         )
                         del value[str(field_code)]
 
+                    # darwin-framework-tool returns the field name but with a different casing than what
+                    # the test suite expects.
+                    # To not confuse the test suite, the field name is replaced by its field name
+                    # equivalent from the spec and then removed.
+                    wrong_casing_field_name = field_name[0].lower(
+                    ) + field_name[1:]
+                    if field_name not in value and field_name[0].upper() == field_name[0] and wrong_casing_field_name in value:
+                        value[field_name] = self.run(
+                            specs,
+                            value[wrong_casing_field_name],
+                            cluster_name,
+                            field_type,
+                            field_array
+                        )
+                        del value[wrong_casing_field_name]
+
             if specs.is_fabric_scoped(struct):
                 value[_FABRIC_INDEX_FIELD_NAME] = self.run(
                     specs,
diff --git a/examples/chip-tool/py_matter_chip_tool_adapter/matter_chip_tool_adapter/encoder.py b/examples/chip-tool/py_matter_chip_tool_adapter/matter_chip_tool_adapter/encoder.py
index 7a3735e213e049..355a2b18433e31 100644
--- a/examples/chip-tool/py_matter_chip_tool_adapter/matter_chip_tool_adapter/encoder.py
+++ b/examples/chip-tool/py_matter_chip_tool_adapter/matter_chip_tool_adapter/encoder.py
@@ -14,7 +14,9 @@
 
 import base64
 import json
+import os
 import re
+import sys
 
 _ANY_COMMANDS_LIST = [
     'ReadById',
@@ -208,6 +210,12 @@ class Encoder:
     def __init__(self, specifications):
         self.__specs = specifications
 
+        # This is not the best way to toggle this flag. But for now it prevents having
+        # to build a new adapter for the very small differences that exists...
+        is_darwin_framework_tool = os.path.basename(
+            sys.argv[0]) == 'darwinframeworktool.py'
+        self.__is_darwin_framework_tool = is_darwin_framework_tool
+
     def encode(self, request):
         cluster = self.__get_cluster_name(request)
         command, command_specifier = self.__get_command_name(request)
@@ -305,7 +313,10 @@ def __maybe_add_destination(self, rv, request):
         if not self._supports_destination(request):
             return rv
 
-        destination_argument_name = 'destination-id'
+        if self.__is_darwin_framework_tool:
+            destination_argument_name = 'node-id'
+        else:
+            destination_argument_name = 'destination-id'
         destination_argument_value = None
 
         if request.group_id:
@@ -333,6 +344,9 @@ def __maybe_add_endpoint(self, rv, request):
         if (request.is_attribute and not request.command == "writeAttribute") or request.is_event or (request.command in _ANY_COMMANDS_LIST and not request.command == "WriteById"):
             endpoint_argument_name = 'endpoint-ids'
 
+        if self.__is_darwin_framework_tool:
+            endpoint_argument_name = 'endpoint-id'
+
         if rv:
             rv += ', '
         rv += f'"{endpoint_argument_name}": "{endpoint_argument_value}"'
@@ -378,7 +392,10 @@ def __get_argument_name(self, request, entry):
 
         if request.is_attribute:
             if command_name == 'writeAttribute':
-                argument_name = 'attribute-values'
+                if self.__is_darwin_framework_tool:
+                    argument_name = 'attr-value'
+                else:
+                    argument_name = 'attribute-values'
             else:
                 argument_name = 'value'
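
Note: below is a minimal standalone sketch of the rename that the decoder.py hunk performs, assuming a hypothetical struct field named "ExampleField". The real code additionally re-runs the converter recursively through self.run(...) on the moved value, which is omitted here.

    # darwin-framework-tool reports struct keys in lowerCamelCase ("exampleField"),
    # while the test suite expects the spec casing ("ExampleField").
    value = {'exampleField': 1}
    field_name = 'ExampleField'

    # Same renaming rule as the decoder hunk, minus the recursive conversion.
    wrong_casing_field_name = field_name[0].lower() + field_name[1:]
    if field_name not in value and field_name[0].upper() == field_name[0] and wrong_casing_field_name in value:
        value[field_name] = value.pop(wrong_casing_field_name)

    print(value)  # {'ExampleField': 1}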