diff --git a/.github/.wordlist.txt b/.github/.wordlist.txt index cfaa48864f70e3..27a823c9e786ac 100644 --- a/.github/.wordlist.txt +++ b/.github/.wordlist.txt @@ -177,6 +177,7 @@ CaseAdminNode CatalogVendorId CBB cbd +CBOR ccf CCMP CCS @@ -459,6 +460,7 @@ emberAfExternalAttributeReadCallback emberAfExternalAttributeWriteCallback EnableNetwork EnableWiFiNetwork +endian EndpointId endpointName endsWith @@ -1177,6 +1179,7 @@ smoketest SMP socat socio +SoCs SodiumConcentrationMeasurement softap SoftDevice @@ -1365,6 +1368,7 @@ utils UUID ux VCP +validator Vectorcall VendorID VendorName diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index cd99077fdc9300..82d8c50458d78f 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -35,7 +35,7 @@ jobs: image: connectedhomeip/chip-build:0.5.79 volumes: - "/tmp/log_output:/tmp/test_logs" - options: --sysctl "net.ipv6.conf.all.disable_ipv6=0 + options: --privileged --sysctl "net.ipv6.conf.all.disable_ipv6=0 net.ipv4.conf.all.forwarding=1 net.ipv6.conf.all.forwarding=1" steps: @@ -57,12 +57,17 @@ jobs: attempt_delay: 2000 - name: Checkout submodules run: scripts/checkout_submodules.py --shallow --platform linux + - name: Try to ensure the directories for core dumping exist and we + can write them. 
+ run: | + mkdir /tmp/cores || true + sysctl -w kernel.core_pattern=/tmp/cores/core.%u.%p.%t || true - name: Bootstrap timeout-minutes: 10 run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | @@ -99,6 +104,22 @@ jobs: - name: Run Build Without Error Logging timeout-minutes: 20 run: scripts/run_in_build_env.sh "ninja -C ./out" + - name: Uploading core files + uses: actions/upload-artifact@v2 + if: ${{ failure() && !env.ACT }} + with: + name: crash-core-linux-gcc-debug + path: /tmp/cores/ + # Cores are big; don't hold on to them too long. + retention-days: 5 + - name: Uploading objdir for debugging + uses: actions/upload-artifact@v2 + if: ${{ failure() && !env.ACT }} + with: + name: crash-objdir-linux-gcc-debug + path: out/ + # objdirs are big; don't hold on to them too long. + retention-days: 5 build_linux: name: Build on Linux (fake, gcc_release, clang, simulated) timeout-minutes: 120 @@ -110,7 +131,7 @@ jobs: image: connectedhomeip/chip-build:0.5.79 volumes: - "/tmp/log_output:/tmp/test_logs" - options: --sysctl "net.ipv6.conf.all.disable_ipv6=0 + options: --privileged --sysctl "net.ipv6.conf.all.disable_ipv6=0 net.ipv4.conf.all.forwarding=1 net.ipv6.conf.all.forwarding=1" steps: @@ -137,12 +158,17 @@ jobs: # languages: "cpp" - name: Checkout submodules run: scripts/checkout_submodules.py --shallow --platform linux + - name: Try to ensure the directories for core dumping exist and we + can write them. 
+ run: | + mkdir /tmp/cores || true + sysctl -w kernel.core_pattern=/tmp/cores/core.%u.%p.%t || true - name: Bootstrap timeout-minutes: 10 run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | @@ -211,6 +237,22 @@ jobs: run: | ./scripts/run_in_build_env.sh \ "./scripts/build/build_examples.py --no-log-timestamps --target linux-fake-tests build" + - name: Uploading core files + uses: actions/upload-artifact@v2 + if: ${{ failure() && !env.ACT }} + with: + name: crash-core-linux + path: /tmp/cores/ + # Cores are big; don't hold on to them too long. + retention-days: 5 + - name: Uploading objdir for debugging + uses: actions/upload-artifact@v2 + if: ${{ failure() && !env.ACT }} + with: + name: crash-objdir-linux + path: out/ + # objdirs are big; don't hold on to them too long. + retention-days: 5 # TODO Log Upload https://github.com/project-chip/connectedhomeip/issues/2227 # TODO https://github.com/project-chip/connectedhomeip/issues/1512 @@ -317,7 +359,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | @@ -365,7 +407,7 @@ jobs: " - name: Uploading diagnostic logs uses: actions/upload-artifact@v2 - if: ${{ failure() }} && ${{ !env.ACT }} + if: ${{ failure() && !env.ACT }} with: name: crash-log-darwin path: ~/Library/Logs/DiagnosticReports/ diff --git a/.github/workflows/cirque.yaml b/.github/workflows/cirque.yaml index cc8f6528d6520c..a14b460375db55 100644 --- a/.github/workflows/cirque.yaml +++ b/.github/workflows/cirque.yaml @@ -127,7 +127,7 @@ jobs: -- scripts/tests/cirque_tests.sh run_all_tests - name: Uploading Binaries uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} 
with: name: cirque_log-${{steps.outsuffix.outputs.value}}-logs path: /tmp/cirque_test_output/ diff --git a/.github/workflows/darwin-tests.yaml b/.github/workflows/darwin-tests.yaml index b4c8efdd639e60..026071e76a9e48 100644 --- a/.github/workflows/darwin-tests.yaml +++ b/.github/workflows/darwin-tests.yaml @@ -20,8 +20,7 @@ on: workflow_dispatch: concurrency: - group: - ${{ github.ref }}-${{ github.workflow }}-${{ (github.event_name == + group: ${{ github.ref }}-${{ github.workflow }}-${{ (github.event_name == 'pull_request' && github.event.number) || (github.event_name == 'workflow_dispatch' && github.run_number) || github.sha }} cancel-in-progress: true @@ -74,65 +73,84 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: - name: - bootstrap-logs-darwin-${{ matrix.build_variant }}${{ matrix.chip_tool }} + name: bootstrap-logs-darwin-${{ matrix.build_variant }}${{ matrix.chip_tool }} path: | .environment/gn_out/.ninja_log .environment/pigweed-venv/*.log - - name: Build Apps - timeout-minutes: 60 + - name: Delete Defaults + run: defaults delete com.apple.dt.xctest.tool + continue-on-error: true + - name: Run macOS Build + timeout-minutes: 40 + # Enable -Werror by hand here, because the Xcode config can't + # enable it for various reasons. Keep whatever Xcode settings + # for OTHER_CFLAGS exist by using ${inherited}. + # + # Disable -Wmacro-redefined because CHIP_DEVICE_CONFIG_ENABLE_MDNS + # seems to be unconditionally defined in CHIPDeviceBuildConfig.h, + # which is apparently being included after CHIPDeviceConfig.h. 
+ run: xcodebuild -target "Matter" -sdk macosx OTHER_CFLAGS='${inherited} -Werror -Wno-macro-redefined' + working-directory: src/darwin/Framework + - name: Copying Framework to Temporary Path + continue-on-error: true run: | - ./scripts/run_in_build_env.sh \ - "./scripts/build/build_examples.py \ - --target darwin-x64-darwin-framework-tool-${BUILD_VARIANT} \ - --target darwin-x64-all-clusters-${BUILD_VARIANT} \ - --target darwin-x64-lock-${BUILD_VARIANT} \ - --target darwin-x64-ota-provider-${BUILD_VARIANT} \ - --target darwin-x64-ota-requestor-${BUILD_VARIANT} \ - --target darwin-x64-tv-app-${BUILD_VARIANT} \ - build \ - --copy-artifacts-to objdir-clone \ - " - - name: Run Tests - timeout-minutes: 60 - run: | - ./scripts/run_in_build_env.sh \ - "./scripts/tests/run_test_suite.py \ - --chip-tool ./out/darwin-x64-darwin-framework-tool-${BUILD_VARIANT}/darwin-framework-tool \ - --target-skip-glob '{TestGroupMessaging}' \ - run \ - --iterations 1 \ - --test-timeout-seconds 120 \ - --all-clusters-app ./out/darwin-x64-all-clusters-${BUILD_VARIANT}/chip-all-clusters-app \ - --lock-app ./out/darwin-x64-lock-${BUILD_VARIANT}/chip-lock-app \ - --ota-provider-app ./out/darwin-x64-ota-provider-${BUILD_VARIANT}/chip-ota-provider-app \ - --ota-requestor-app ./out/darwin-x64-ota-requestor-${BUILD_VARIANT}/chip-ota-requestor-app \ - --tv-app ./out/darwin-x64-tv-app-${BUILD_VARIANT}/chip-tv-app \ - " + mkdir -p /tmp/macos_framework_output + ls -la /Users/runner/work/connectedhomeip/connectedhomeip/src/darwin/Framework/build/Release/ + mv /Users/runner/work/connectedhomeip/connectedhomeip/src/darwin/Framework/build/Release/Matter.framework /tmp/macos_framework_output + ls -la /tmp/macos_framework_output + # Disabling for now + # + # - name: Build Apps + # timeout-minutes: 60 + # run: | + # ./scripts/run_in_build_env.sh \ + # "./scripts/build/build_examples.py \ + # --target darwin-x64-darwin-framework-tool-${BUILD_VARIANT} \ + # --target darwin-x64-all-clusters-${BUILD_VARIANT} \ + 
# --target darwin-x64-lock-${BUILD_VARIANT} \ + # --target darwin-x64-ota-provider-${BUILD_VARIANT} \ + # --target darwin-x64-ota-requestor-${BUILD_VARIANT} \ + # --target darwin-x64-tv-app-${BUILD_VARIANT} \ + # build \ + # --copy-artifacts-to objdir-clone \ + # " + # - name: Run Tests + # timeout-minutes: 60 + # run: | + # ./scripts/run_in_build_env.sh \ + # "./scripts/tests/run_test_suite.py \ + # --chip-tool ./out/darwin-x64-darwin-framework-tool-${BUILD_VARIANT}/darwin-framework-tool \ + # --target-skip-glob '{TestGroupMessaging}' \ + # run \ + # --iterations 1 \ + # --test-timeout-seconds 120 \ + # --all-clusters-app ./out/darwin-x64-all-clusters-${BUILD_VARIANT}/chip-all-clusters-app \ + # --lock-app ./out/darwin-x64-lock-${BUILD_VARIANT}/chip-lock-app \ + # --ota-provider-app ./out/darwin-x64-ota-provider-${BUILD_VARIANT}/chip-ota-provider-app \ + # --ota-requestor-app ./out/darwin-x64-ota-requestor-${BUILD_VARIANT}/chip-ota-requestor-app \ + # --tv-app ./out/darwin-x64-tv-app-${BUILD_VARIANT}/chip-tv-app \ + # " - name: Uploading core files uses: actions/upload-artifact@v2 - if: ${{ failure() }} && ${{ !env.ACT }} + if: ${{ failure() && !env.ACT }} with: - name: - crash-core-darwin-${{ matrix.build_variant }}${{ matrix.chip_tool }} + name: crash-core-darwin-${{ matrix.build_variant }}${{ matrix.chip_tool }} path: /cores/ # Cores are big; don't hold on to them too long. 
retention-days: 5 - name: Uploading diagnostic logs uses: actions/upload-artifact@v2 - if: ${{ failure() }} && ${{ !env.ACT }} + if: ${{ failure() && !env.ACT }} with: - name: - crash-log-darwin-${{ matrix.build_variant }}${{ matrix.chip_tool }} + name: crash-log-darwin-${{ matrix.build_variant }}${{ matrix.chip_tool }} path: ~/Library/Logs/DiagnosticReports/ - name: Uploading objdir for debugging uses: actions/upload-artifact@v2 - if: ${{ failure() }} && ${{ !env.ACT }} + if: ${{ failure() && !env.ACT }} with: - name: - crash-objdir-darwin-${{ matrix.build_variant }}${{ matrix.chip_tool }} + name: crash-objdir-darwin-${{ matrix.build_variant }}${{ matrix.chip_tool }} path: objdir-clone/ # objdirs are big; don't hold on to them too long. retention-days: 5 diff --git a/.github/workflows/darwin.yaml b/.github/workflows/darwin.yaml index 258d2d66336833..f1c87eb9314547 100644 --- a/.github/workflows/darwin.yaml +++ b/.github/workflows/darwin.yaml @@ -57,7 +57,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | @@ -66,11 +66,11 @@ jobs: - name: Run iOS Build Debug timeout-minutes: 30 working-directory: src/darwin/Framework - run: xcodebuild -target "CHIP" -sdk iphoneos + run: xcodebuild -target "Matter" -sdk iphoneos - name: Run iOS Build Release timeout-minutes: 30 working-directory: src/darwin/Framework - run: xcodebuild -target "CHIP" -sdk iphoneos -configuration Release + run: xcodebuild -target "Matter" -sdk iphoneos -configuration Release - name: Clean Build run: xcodebuild clean working-directory: src/darwin/Framework @@ -78,7 +78,7 @@ jobs: run: defaults delete com.apple.dt.xctest.tool continue-on-error: true - name: Run macOS Build - timeout-minutes: 30 + timeout-minutes: 40 # Enable -Werror by hand here, because the Xcode config can't # enable it for various reasons. 
Keep whatever Xcode settings # for OTHER_CFLAGS exist by using ${inherited}. @@ -86,15 +86,23 @@ jobs: # Disable -Wmacro-redefined because CHIP_DEVICE_CONFIG_ENABLE_MDNS # seems to be unconditionally defined in CHIPDeviceBuildConfig.h, # which is apparently being included after CHIPDeviceConfig.h. - run: xcodebuild -target "CHIP" -sdk macosx OTHER_CFLAGS='${inherited} -Werror -Wno-macro-redefined' + run: xcodebuild -target "Matter" -sdk macosx OTHER_CFLAGS='${inherited} -Werror -Wno-macro-redefined' working-directory: src/darwin/Framework + - name: Copying Framework to Temporary Path + continue-on-error: true + run: | + mkdir -p /tmp/macos_framework_output + ls -la /Users/runner/work/connectedhomeip/connectedhomeip/src/darwin/Framework/build/Release/ + mv /Users/runner/work/connectedhomeip/connectedhomeip/src/darwin/Framework/build/Release/Matter.framework /tmp/macos_framework_output + ls -la /tmp/macos_framework_output - name: Clean Build run: xcodebuild clean working-directory: src/darwin/Framework - - name: Build example darwin-framework-tool - timeout-minutes: 15 - run: | - scripts/examples/gn_build_example.sh examples/darwin-framework-tool out/debug chip_config_network_layer_ble=false is_asan=true + # Disabling for now + # - name: Build example darwin-framework-tool + # timeout-minutes: 15 + # run: | + # scripts/examples/gn_build_example.sh examples/darwin-framework-tool out/debug chip_config_network_layer_ble=false is_asan=true - name: Build example All Clusters Server timeout-minutes: 15 run: | @@ -115,11 +123,11 @@ jobs: run: | mkdir -p /tmp/darwin/framework-tests ../../../out/debug/chip-all-clusters-app --interface-id -1 > >(tee /tmp/darwin/framework-tests/all-cluster-app.log) 2> >(tee /tmp/darwin/framework-tests/all-cluster-app-err.log >&2) & - xcodebuild test -target "CHIP" -scheme "CHIP Framework Tests" -sdk macosx OTHER_CFLAGS='${inherited} -Werror -Wno-incomplete-umbrella' > >(tee /tmp/darwin/framework-tests/darwin-tests.log) 2> >(tee 
/tmp/darwin/framework-tests/darwin-tests-err.log >&2) + xcodebuild test -target "Matter" -scheme "Matter Framework Tests" -sdk macosx OTHER_CFLAGS='${inherited} -Werror -Wno-incomplete-umbrella' > >(tee /tmp/darwin/framework-tests/darwin-tests.log) 2> >(tee /tmp/darwin/framework-tests/darwin-tests-err.log >&2) working-directory: src/darwin/Framework - name: Uploading log files uses: actions/upload-artifact@v2 - if: ${{ failure() }} && ${{ !env.ACT }} + if: ${{ failure() && !env.ACT }} with: name: darwin-framework-test-logs path: /tmp/darwin/framework-tests diff --git a/.github/workflows/examples-cc13x2x7_26x2x7.yaml b/.github/workflows/examples-cc13x2x7_26x2x7.yaml index 885133547c3d54..4c7b67c2c9bb77 100644 --- a/.github/workflows/examples-cc13x2x7_26x2x7.yaml +++ b/.github/workflows/examples-cc13x2x7_26x2x7.yaml @@ -58,7 +58,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | diff --git a/.github/workflows/examples-cyw30739.yaml b/.github/workflows/examples-cyw30739.yaml index 458833b33bdfec..a3126dbba0442c 100644 --- a/.github/workflows/examples-cyw30739.yaml +++ b/.github/workflows/examples-cyw30739.yaml @@ -57,7 +57,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | diff --git a/.github/workflows/examples-efr32.yaml b/.github/workflows/examples-efr32.yaml index 262d9b771795bc..2324317338fe8c 100644 --- a/.github/workflows/examples-efr32.yaml +++ b/.github/workflows/examples-efr32.yaml @@ -61,7 +61,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | diff --git 
a/.github/workflows/examples-esp32.yaml b/.github/workflows/examples-esp32.yaml index 61190b3a763a04..7751329753370a 100644 --- a/.github/workflows/examples-esp32.yaml +++ b/.github/workflows/examples-esp32.yaml @@ -59,7 +59,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | @@ -140,7 +140,7 @@ jobs: - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | diff --git a/.github/workflows/examples-infineon.yaml b/.github/workflows/examples-infineon.yaml index 5f381cc180cd5e..e9d038863669c8 100644 --- a/.github/workflows/examples-infineon.yaml +++ b/.github/workflows/examples-infineon.yaml @@ -57,7 +57,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | diff --git a/.github/workflows/examples-k32w.yaml b/.github/workflows/examples-k32w.yaml index e43db4cef5c77b..3518ae8ea99339 100644 --- a/.github/workflows/examples-k32w.yaml +++ b/.github/workflows/examples-k32w.yaml @@ -60,7 +60,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | diff --git a/.github/workflows/examples-linux-arm.yaml b/.github/workflows/examples-linux-arm.yaml index 369500947303e5..d86f9937f2c76d 100644 --- a/.github/workflows/examples-linux-arm.yaml +++ b/.github/workflows/examples-linux-arm.yaml @@ -58,7 +58,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && 
!env.ACT }} with: name: bootstrap-logs path: | diff --git a/.github/workflows/examples-linux-standalone.yaml b/.github/workflows/examples-linux-standalone.yaml index 2e6e0edb3f40e4..7312b61daccdaf 100644 --- a/.github/workflows/examples-linux-standalone.yaml +++ b/.github/workflows/examples-linux-standalone.yaml @@ -61,7 +61,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | diff --git a/.github/workflows/examples-mbed.yaml b/.github/workflows/examples-mbed.yaml index 65bf46ebfcdf71..15c12fd4f0f576 100644 --- a/.github/workflows/examples-mbed.yaml +++ b/.github/workflows/examples-mbed.yaml @@ -75,7 +75,7 @@ jobs: - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | diff --git a/.github/workflows/examples-nrfconnect.yaml b/.github/workflows/examples-nrfconnect.yaml index b15ad7d4aa1da0..7d9824a8db0dfa 100644 --- a/.github/workflows/examples-nrfconnect.yaml +++ b/.github/workflows/examples-nrfconnect.yaml @@ -74,7 +74,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | @@ -196,7 +196,7 @@ jobs: scripts/run_in_build_env.sh "./scripts/build/build_examples.py --target nrf-native-posix-64-tests build" - name: Uploading Failed Test Logs uses: actions/upload-artifact@v2 - if: ${{ failure() }} && ${{ !env.ACT }} + if: ${{ failure() && !env.ACT }} with: name: test-log path: | diff --git a/.github/workflows/examples-qpg.yaml b/.github/workflows/examples-qpg.yaml index e1e4b40b1f6d3c..d772919707d131 100644 --- a/.github/workflows/examples-qpg.yaml +++ b/.github/workflows/examples-qpg.yaml @@ -60,7 +60,7 @@ jobs: run: 
scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | diff --git a/.github/workflows/full-android.yaml b/.github/workflows/full-android.yaml index 09960d78aaba1e..e2cb8a68c607d6 100644 --- a/.github/workflows/full-android.yaml +++ b/.github/workflows/full-android.yaml @@ -53,7 +53,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | diff --git a/.github/workflows/fuzzing-build.yaml b/.github/workflows/fuzzing-build.yaml index dedc1b67b109c4..4fbead5e450e4d 100644 --- a/.github/workflows/fuzzing-build.yaml +++ b/.github/workflows/fuzzing-build.yaml @@ -55,7 +55,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | @@ -113,7 +113,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | diff --git a/.github/workflows/qemu.yaml b/.github/workflows/qemu.yaml index 60f620195025d4..9374bb6ee0b7e0 100644 --- a/.github/workflows/qemu.yaml +++ b/.github/workflows/qemu.yaml @@ -54,7 +54,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | diff --git a/.github/workflows/release_artifacts.yaml b/.github/workflows/release_artifacts.yaml index de38d645157cd8..199b2bc7543942 100644 --- a/.github/workflows/release_artifacts.yaml +++ b/.github/workflows/release_artifacts.yaml 
@@ -47,7 +47,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | @@ -92,7 +92,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | diff --git a/.github/workflows/smoketest-android.yaml b/.github/workflows/smoketest-android.yaml index dc800768554bd3..239f32fd7ff562 100644 --- a/.github/workflows/smoketest-android.yaml +++ b/.github/workflows/smoketest-android.yaml @@ -54,7 +54,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 30ab4616ac4b3d..351d50ed7e32e0 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -69,7 +69,7 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs-linux-${{ matrix.build_variant }}${{ matrix.chip_tool }} path: | @@ -106,7 +106,7 @@ jobs: " - name: Uploading core files uses: actions/upload-artifact@v2 - if: ${{ failure() }} && ${{ !env.ACT }} + if: ${{ failure() && !env.ACT }} with: name: crash-core-linux-${{ matrix.build_variant }}${{ matrix.chip_tool }} path: /tmp/cores/ @@ -114,7 +114,7 @@ jobs: retention-days: 5 - name: Uploading objdir for debugging uses: actions/upload-artifact@v2 - if: ${{ failure() }} && ${{ !env.ACT }} + if: ${{ failure() && !env.ACT }} with: name: crash-objdir-linux-${{ matrix.build_variant }}${{ matrix.chip_tool }} path: objdir-clone/ @@ -170,14 +170,14 
@@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs-darwin-${{ matrix.build_variant }}${{ matrix.chip_tool }} path: | .environment/gn_out/.ninja_log .environment/pigweed-venv/*.log - name: Build Apps - timeout-minutes: 40 + timeout-minutes: 60 run: | ./scripts/run_in_build_env.sh \ "./scripts/build/build_examples.py \ @@ -196,7 +196,7 @@ jobs: ./scripts/run_in_build_env.sh \ "./scripts/tests/run_test_suite.py \ --chip-tool ./out/darwin-x64-chip-tool${CHIP_TOOL_VARIANT}-${BUILD_VARIANT}/chip-tool \ - --target-skip-glob '{TestGroupMessaging,Test_TC_DIAG_TH_NW_1_1,Test_TC_DIAG_TH_NW_1_2,Test_TC_DIAG_TH_NW_2_2,Test_TC_DIAG_TH_NW_2_3,Test_TC_DIAG_TH_NW_2_6,Test_TC_DIAG_TH_NW_2_7,Test_TC_DIAG_TH_NW_2_8,Test_TC_DIAG_TH_NW_2_9}' \ + --target-skip-glob '{TestGroupMessaging,Test_TC_DIAG_TH_NW_2_1,Test_TC_DIAG_TH_NW_2_2,Test_TC_DIAG_TH_NW_2_3,Test_TC_DIAG_TH_NW_2_4}' \ run \ --iterations 1 \ --test-timeout-seconds 120 \ @@ -208,7 +208,7 @@ jobs: " - name: Uploading core files uses: actions/upload-artifact@v2 - if: ${{ failure() }} && ${{ !env.ACT }} + if: ${{ failure() && !env.ACT }} with: name: crash-core-darwin-${{ matrix.build_variant }}${{ matrix.chip_tool }} path: /cores/ @@ -216,13 +216,13 @@ jobs: retention-days: 5 - name: Uploading diagnostic logs uses: actions/upload-artifact@v2 - if: ${{ failure() }} && ${{ !env.ACT }} + if: ${{ failure() && !env.ACT }} with: name: crash-log-darwin-${{ matrix.build_variant }}${{ matrix.chip_tool }} path: ~/Library/Logs/DiagnosticReports/ - name: Uploading objdir for debugging uses: actions/upload-artifact@v2 - if: ${{ failure() }} && ${{ !env.ACT }} + if: ${{ failure() && !env.ACT }} with: name: crash-objdir-darwin-${{ matrix.build_variant }}${{ matrix.chip_tool }} path: objdir-clone/ @@ -240,8 +240,7 @@ jobs: container: image: connectedhomeip/chip-build:0.5.79 - options: - 
--privileged --sysctl "net.ipv6.conf.all.disable_ipv6=0 + options: --privileged --sysctl "net.ipv6.conf.all.disable_ipv6=0 net.ipv4.conf.all.forwarding=1 net.ipv6.conf.all.forwarding=1" steps: @@ -249,8 +248,7 @@ jobs: uses: actions/checkout@v2 - name: Checkout submodules run: scripts/checkout_submodules.py --shallow --platform linux - - name: - Try to ensure the directories for core dumping exist and we + - name: Try to ensure the directories for core dumping exist and we can write them. run: | mkdir /tmp/cores || true @@ -261,43 +259,40 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: - name: - bootstrap-logs-linux-${{ matrix.build_variant }}${{ matrix.chip_tool }} + name: bootstrap-logs-linux-${{ matrix.build_variant }}${{ matrix.chip_tool }} path: | .environment/gn_out/.ninja_log .environment/pigweed-venv/*.log - name: Build Python REPL and example apps timeout-minutes: 50 run: | - scripts/run_in_build_env.sh './scripts/build_python.sh --install_wheel build-env' - ./scripts/run_in_build_env.sh \ - "./scripts/build/build_examples.py \ - --target linux-x64-all-clusters-no-ble-no-wifi-tsan-clang-test \ - --target linux-x64-python-bindings \ - build \ - --copy-artifacts-to objdir-clone \ - " + scripts/run_in_build_env.sh './scripts/build_python.sh --install_wheel build-env' + ./scripts/run_in_build_env.sh \ + "./scripts/build/build_examples.py \ + --target linux-x64-all-clusters-no-ble-no-wifi-tsan-clang-test \ + --target linux-x64-python-bindings \ + build \ + --copy-artifacts-to objdir-clone \ + " - name: Run Tests timeout-minutes: 40 run: | - scripts/run_in_build_env.sh './scripts/tests/run_python_test.py --app out/linux-x64-all-clusters-no-ble-no-wifi-tsan-clang-test/chip-all-clusters-app --factoryreset --script-args "--log-level INFO -t 3600 --disable-test ClusterObjectTests.TestTimedRequestTimeout"' + 
scripts/run_in_build_env.sh './scripts/tests/run_python_test.py --app out/linux-x64-all-clusters-no-ble-no-wifi-tsan-clang-test/chip-all-clusters-app --factoryreset --script-args "--log-level INFO -t 3600 --disable-test ClusterObjectTests.TestTimedRequestTimeout"' - name: Uploading core files uses: actions/upload-artifact@v2 - if: ${{ failure() }} && ${{ !env.ACT }} + if: ${{ failure() && !env.ACT }} with: - name: - crash-core-linux-python-repl + name: crash-core-linux-python-repl path: /tmp/cores/ # Cores are big; don't hold on to them too long. retention-days: 5 - name: Uploading objdir for debugging uses: actions/upload-artifact@v2 - if: ${{ failure() }} && ${{ !env.ACT }} + if: ${{ failure() && !env.ACT }} with: - name: - crash-objdir-linux-python-repl + name: crash-objdir-linux-python-repl path: objdir-clone/ # objdirs are big; don't hold on to them too long. retention-days: 5 @@ -344,49 +339,45 @@ jobs: run: scripts/build/gn_bootstrap.sh - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: - name: - bootstrap-logs-darwin-${{ matrix.build_variant }}${{ matrix.chip_tool }} + name: bootstrap-logs-darwin-${{ matrix.build_variant }}${{ matrix.chip_tool }} path: | .environment/gn_out/.ninja_log .environment/pigweed-venv/*.log - name: Build Python REPL and example apps timeout-minutes: 50 run: | - scripts/run_in_build_env.sh './scripts/build_python.sh --install_wheel build-env' - ./scripts/run_in_build_env.sh \ - "./scripts/build/build_examples.py \ - --target darwin-x64-all-clusters-${BUILD_VARIANT}-test \ - build \ - --copy-artifacts-to objdir-clone \ - " + scripts/run_in_build_env.sh './scripts/build_python.sh --install_wheel build-env' + ./scripts/run_in_build_env.sh \ + "./scripts/build/build_examples.py \ + --target darwin-x64-all-clusters-${BUILD_VARIANT}-test \ + build \ + --copy-artifacts-to objdir-clone \ + " - name: Run Tests timeout-minutes: 30 run: | - 
scripts/run_in_build_env.sh './scripts/tests/run_python_test.py --app out/darwin-x64-all-clusters-no-ble-no-wifi-tsan-clang-test/chip-all-clusters-app --factoryreset --app-args "--discriminator 3840 --interface-id -1" --script-args "-t 3600 --disable-test ClusterObjectTests.TestTimedRequestTimeout"' + scripts/run_in_build_env.sh './scripts/tests/run_python_test.py --app out/darwin-x64-all-clusters-no-ble-no-wifi-tsan-clang-test/chip-all-clusters-app --factoryreset --app-args "--discriminator 3840 --interface-id -1" --script-args "-t 3600 --disable-test ClusterObjectTests.TestTimedRequestTimeout"' - name: Uploading core files uses: actions/upload-artifact@v2 - if: ${{ failure() }} && ${{ !env.ACT }} + if: ${{ failure() && !env.ACT }} with: - name: - crash-core-darwin-python-repl + name: crash-core-darwin-python-repl path: /cores/ # Cores are big; don't hold on to them too long. retention-days: 5 - name: Uploading diagnostic logs uses: actions/upload-artifact@v2 - if: ${{ failure() }} && ${{ !env.ACT }} + if: ${{ failure() && !env.ACT }} with: - name: - crash-log-darwin-python-repl + name: crash-log-darwin-python-repl path: ~/Library/Logs/DiagnosticReports/ - name: Uploading objdir for debugging uses: actions/upload-artifact@v2 - if: ${{ failure() }} && ${{ !env.ACT }} + if: ${{ failure() && !env.ACT }} with: - name: - crash-objdir-darwin-python-repl + name: crash-objdir-darwin-python-repl path: objdir-clone/ # objdirs are big; don't hold on to them too long. 
retention-days: 5 diff --git a/.github/workflows/unit_integration_test.yaml b/.github/workflows/unit_integration_test.yaml index d13919cd245a64..99a9398363f011 100644 --- a/.github/workflows/unit_integration_test.yaml +++ b/.github/workflows/unit_integration_test.yaml @@ -60,7 +60,7 @@ jobs: scripts/build/gn_bootstrap.sh ; - name: Uploading bootstrap logs uses: actions/upload-artifact@v2 - if: ${{ always() }} && ${{ !env.ACT }} + if: ${{ always() && !env.ACT }} with: name: bootstrap-logs path: | diff --git a/.gitmodules b/.gitmodules index f19fe86fa2da4d..ed490c7ed0dd40 100644 --- a/.gitmodules +++ b/.gitmodules @@ -241,7 +241,7 @@ path = third_party/ti_simplelink_sdk/repo_cc32xx url = https://github.com/TexasInstruments/cc32xx_open_sdk.git branch = main - platform = cc32xx + platforms = cc32xx [submodule "third_party/nxp/mw320_sdk/repo"] path = third_party/nxp/mw320_sdk/repo url = https://github.com/nxptest/mw320_sdk diff --git a/.restyled.yaml b/.restyled.yaml index 937a8b4afa2a53..95780a56920d35 100644 --- a/.restyled.yaml +++ b/.restyled.yaml @@ -72,6 +72,7 @@ exclude: - "scripts/idl/tests/outputs/**/*" # Matches generated output 1:1 - "examples/chef/sample_app_util/test_files/*.yaml" - "examples/chef/zzz_generated/**/*" + - "src/darwin/Framework/CHIP/zap-generated/MTRClustersObjc.mm" # https://github.com/project-chip/connectedhomeip/issues/20236 changed_paths: diff --git a/.vscode/settings.json b/.vscode/settings.json index ac8bf404d4ec08..a8e2b99d4e8a70 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -117,6 +117,13 @@ "random": "cpp", "thread": "cpp" }, + // Configure paths or glob patterns to exclude from file watching. 
+ "files.watcherExclude": { + "**/.git/objects/**": true, + "**/.git/subtree-cache/**": true, + "out/": true, + "**/third_party/**": true + }, "files.eol": "\n", "editor.formatOnSave": true, "better-comments.tags": [ diff --git a/BUILD.gn b/BUILD.gn index 7e3cc729bb96ea..f02d6fb3e6888f 100644 --- a/BUILD.gn +++ b/BUILD.gn @@ -226,9 +226,9 @@ if (current_toolchain != "${dir_pw_toolchain}/default:default") { enable_standalone_chip_tool_build = enable_default_builds && host_os != "win" - # Build the darwin-framework-tool example. - enable_standalone_chip_tool_darwin_build = - enable_default_builds && host_os == "mac" + # Build the darwin-framework-tool example. By default this is off, because + # it requires a preceding XCode build of the framework. + enable_standalone_chip_tool_darwin_build = false # Build the shell example. enable_standalone_shell_build = enable_default_builds && host_os != "win" diff --git a/build/config/compiler/BUILD.gn b/build/config/compiler/BUILD.gn index 35c51811418096..c5f6392613b749 100644 --- a/build/config/compiler/BUILD.gn +++ b/build/config/compiler/BUILD.gn @@ -453,7 +453,7 @@ config("coverage_default") { declare_args() { # Enable Runtime Type Information (RTTI) - enable_rtti = false + enable_rtti = current_os == "mac" || current_os == "ios" } config("no_rtti") { diff --git a/config/ameba/args.gni b/config/ameba/args.gni index 57c21e2d02e8c2..d09b1f5cba9843 100755 --- a/config/ameba/args.gni +++ b/config/ameba/args.gni @@ -15,6 +15,9 @@ # Options from standalone-chip.mk that differ from configure defaults. These # options are used from examples/. 
+import("//build_overrides/pigweed.gni") +import("$dir_pw_span/polyfill.gni") + chip_device_platform = "ameba" chip_project_config_include = "" @@ -35,4 +38,5 @@ custom_toolchain = "//third_party/connectedhomeip/config/ameba/toolchain:ameba" pw_build_PIP_CONSTRAINTS = [ "//third_party/connectedhomeip/scripts/constraints.txt" ] +pw_span_ENABLE_STD_SPAN_POLYFILL = false cpp_standard = "c++17" diff --git a/config/bouffalolab/bl602/lib/pw_rpc/BUILD.gn b/config/bouffalolab/bl602/lib/pw_rpc/BUILD.gn index 66ab51fc23adf0..8f11811b370736 100644 --- a/config/bouffalolab/bl602/lib/pw_rpc/BUILD.gn +++ b/config/bouffalolab/bl602/lib/pw_rpc/BUILD.gn @@ -24,7 +24,7 @@ static_library("pw_rpc") { public_deps = [ "$dir_pw_rpc:server", "$dir_pw_rpc/nanopb:echo_service", - "${chip_root}/examples/platform/bl602/pw_sys_io:pw_sys_io_bl602", + "${chip_root}/examples/platform/bouffalolab/bl602/pw_sys_io:pw_sys_io_bl602", "${dir_pigweed}/pw_hdlc:pw_rpc", dir_pw_assert, dir_pw_checksum, diff --git a/config/bouffalolab/bl602/lib/pw_rpc/pw_rpc.gni b/config/bouffalolab/bl602/lib/pw_rpc/pw_rpc.gni index 69bd1634363ebc..87e772e7c7ba96 100644 --- a/config/bouffalolab/bl602/lib/pw_rpc/pw_rpc.gni +++ b/config/bouffalolab/bl602/lib/pw_rpc/pw_rpc.gni @@ -16,9 +16,10 @@ import("//build_overrides/chip.gni") import("//build_overrides/pigweed.gni") pw_log_BACKEND = "$dir_pw_log_basic" -pw_assert_BACKEND = "$dir_pw_assert_log:check_backend" +pw_assert_BACKEND = "$dir_pw_assert_log" pw_sys_io_BACKEND = - "${chip_root}/examples/platform/bl602/pw_sys_io:pw_sys_io_bl602" + "${chip_root}/examples/platform/bouffalolab/bl602/pw_sys_io:pw_sys_io_bl602" +pw_span_ENABLE_STD_SPAN_POLYFILL = false pw_build_LINK_DEPS = [ "$dir_pw_assert:impl", diff --git a/config/efr32/lib/pw_rpc/pw_rpc.gni b/config/efr32/lib/pw_rpc/pw_rpc.gni index 68f1c6154f4a91..2c8aaf4d14fba1 100644 --- a/config/efr32/lib/pw_rpc/pw_rpc.gni +++ b/config/efr32/lib/pw_rpc/pw_rpc.gni @@ -19,6 +19,7 @@ pw_log_BACKEND = "$dir_pw_log_basic" 
pw_assert_BACKEND = "$dir_pw_assert_log:check_backend" pw_sys_io_BACKEND = "${chip_root}/examples/platform/efr32/pw_sys_io:pw_sys_io_efr32" +pw_span_ENABLE_STD_SPAN_POLYFILL = false pw_build_LINK_DEPS = [ "$dir_pw_assert:impl", diff --git a/config/esp32/args.gni b/config/esp32/args.gni index 01308ef61f809c..1f6a6bf0987103 100644 --- a/config/esp32/args.gni +++ b/config/esp32/args.gni @@ -14,7 +14,8 @@ # Options from standalone-chip.mk that differ from configure defaults. These # options are used from examples/. - +import("//build_overrides/pigweed.gni") +import("$dir_pw_span/polyfill.gni") chip_device_platform = "esp32" chip_project_config_include = "" @@ -38,3 +39,4 @@ custom_toolchain = "//third_party/connectedhomeip/config/esp32/toolchain:esp32" # whatever pigweed ships with pw_build_PIP_CONSTRAINTS = [ "//third_party/connectedhomeip/scripts/constraints.txt" ] +pw_span_ENABLE_STD_SPAN_POLYFILL = false diff --git a/config/ios/CHIPProjectConfig.h b/config/ios/CHIPProjectConfig.h index 6a9d7eb3213836..dfa35f7cfd5254 100644 --- a/config/ios/CHIPProjectConfig.h +++ b/config/ios/CHIPProjectConfig.h @@ -44,4 +44,9 @@ #define CHIP_CONFIG_KVS_PATH "chip.store" #endif +// The session pool size limits how many subscriptions we can have live at +// once. Home supports up to 1000 accessories, and we subscribe to all of them, +// so we need to make sure the pool is big enough for that. 
+#define CHIP_CONFIG_SECURE_SESSION_POOL_SIZE 1000 + #endif /* CHIPPROJECTCONFIG_H */ diff --git a/config/mbed/chip-gn/lib/pw_rpc/pw_rpc.gni b/config/mbed/chip-gn/lib/pw_rpc/pw_rpc.gni index 2b03f6b4c9958c..b2033e5280c539 100644 --- a/config/mbed/chip-gn/lib/pw_rpc/pw_rpc.gni +++ b/config/mbed/chip-gn/lib/pw_rpc/pw_rpc.gni @@ -21,6 +21,7 @@ pw_sys_io_BACKEND = "${chip_root}/examples/platform/mbed/pw_sys_io:pw_sys_io_mbed" pw_rpc_system_server_BACKEND = "${chip_root}/examples/common/pigweed:system_rpc_server" +pw_span_ENABLE_STD_SPAN_POLYFILL = false pw_build_LINK_DEPS = [ "$dir_pw_assert:impl", diff --git a/config/qpg/lib/pw_rpc/pw_rpc.gni b/config/qpg/lib/pw_rpc/pw_rpc.gni index 60fb93e6b8fc44..4100605a1445b1 100644 --- a/config/qpg/lib/pw_rpc/pw_rpc.gni +++ b/config/qpg/lib/pw_rpc/pw_rpc.gni @@ -18,6 +18,7 @@ import("//build_overrides/pigweed.gni") pw_log_BACKEND = "$dir_pw_log_basic" pw_assert_BACKEND = "$dir_pw_assert_log:check_backend" pw_sys_io_BACKEND = "${chip_root}/examples/platform/qpg/pw_sys_io:pw_sys_io_qpg" +pw_span_ENABLE_STD_SPAN_POLYFILL = false pw_build_LINK_DEPS = [ "$dir_pw_assert:impl", diff --git a/docs/guides/nrfconnect_factory_data_configuration.md b/docs/guides/nrfconnect_factory_data_configuration.md new file mode 100644 index 00000000000000..786042f7c785a7 --- /dev/null +++ b/docs/guides/nrfconnect_factory_data_configuration.md @@ -0,0 +1,705 @@ +# Configuring factory data for the nRF Connect examples + +Factory data is a set of device parameters written to the non-volatile memory +during the manufacturing process. This guide describes the process of creating +and programming factory data using Matter and the nRF Connect platform from +Nordic Semiconductor. + +The factory data parameter set includes different types of information, for +example about device certificates, cryptographic keys, device identifiers, and +hardware. 
All those parameters are vendor-specific and must be inserted into a +device's persistent storage during the manufacturing process. The factory data +parameters are read at the boot time of a device. Then, they can be used in the +Matter stack and user application (for example during commissioning). + +All of the factory data parameters are protected against modifications by the +software, and the firmware data parameter set must be kept unchanged during the +lifetime of the device. When implementing your firmware, you must make sure that +the factory data parameters are not re-written or overwritten during the Device +Firmware Update (DFU) or factory resets, except in some vendor-defined cases. + +For the nRF Connect platform, the factory data is stored by default in a +separate partition of the internal flash memory. This helps to keep the factory +data secure by applying hardware write protection. + +

+ Nordic Semiconductor logo + nRF52840 DK +

+ +
+ +- [Overview](#overview) + - [Factory data components](#factory-data-components) + - [Factory data format](#factory-data-format) +- [Enabling factory data support](#enabling-factory-data-support) +- [Generating factory data](#generating-factory-data) + - [Creating factory data JSON file with the first script](#creating-factory-data-json-file-with-the-first-script) + - [Verifying using the JSON Schema tool](#verifying-using-the-json-schema-tool) + - [Option 1: Using the php-json-schema tool](#option-1-using-the-php-json-schema-tool) + - [Option 2: Using a website validator](#option-2-using-a-website-validator) + - [Option 3: Using the nRF Connect Python script](#option-3-using-the-nrf-connect-python-script) + - [Preparing factory data partition on a device](#preparing-factory-data-partition-on-a-device) + - [Creating the factory data partition with the second script](#creating-the-factory-data-partition-with-the-second-script) +- [Building an example with factory data](#building-an-example-with-factory-data) + - [Providing factory data parameters as a build argument list](#providing-factory-data-parameters-as-a-build-argument-list) + - [Setting factory data parameters using interactive Kconfig interfaces](#setting-factory-data-parameters-using-interactive-kconfig-interfaces) +- [Programming factory data](#programming-factory-data) +- [Using own factory data implementation](#using-own-factory-data-implementation) + +
+ + + +## Overview + +You can implement the factory data set described in the +[factory data component table](#factory-data-component-table) in various ways, +as long as the final HEX file contains all mandatory components defined in the +table. In this guide, the [generating factory data](#generating-factory-data) +and the +[building an example with factory data](#building-an-example-with-factory-data) +sections describe one of the implementations of the factory data set created by +the nRF Connect platform's maintainers. At the end of the process, you get a HEX +file that contains the factory data partition in the CBOR format. + +The factory data accessor is a component that reads and decodes factory data +parameters from the device's persistent storage and creates an interface to +provide all of them to the Matter stack and to the user application. + +The default implementation of the factory data accessor assumes that the factory +data stored in the device's flash memory is provided in the CBOR format. +However, it is possible to generate the factory data set without using the nRF +Connect scripts and implement another parser and a factory data accessor. This +is possible if the newly provided implementation is consistent with the +[Factory Data Provider](../../src/platform/nrfconnect/FactoryDataProvider.h). +For more information about preparing a factory data accessor, see the section +about +[using own factory data implementation](#using-own-factory-data-implementation). + +> Note: Encryption and security of the factory data partition is not provided +> yet for this feature. 
+ +### Factory data component table + +The following table lists the parameters of a factory data set: + +| Key name | Full name | Length | Format | Conformance | Description | +| :---------------: | :----------------------------------: | :--------: | :----------: | :---------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| `version` | factory data version | 2 B | uint16 | mandatory | A version of the current factory data set. It cannot be changed by a user and it must be coherent with current version of the Factory Data Provider on device side. | +| `sn` | serial number | <1, 32> B | ASCII string | mandatory | A serial number parameter defines an unique number of manufactured device. The maximum length of the serial number is 32 characters. | +| `vendor_id` | vendor ID | 2 B | uint16 | mandatory | A CSA-assigned ID for the organization responsible for producing the device. | +| `product_id` | product ID | 2 B | uint16 | mandatory | A unique ID assigned by the device vendor to identify the product. It defaults to a CSA-assigned ID that designates a non-production or test product. | +| `vendor_name` | vendor name | <1, 32> B | ASCII string | mandatory | A human-readable vendor name that provides a simple string containing identification of device's vendor for the application and Matter stack purposes. | +| `product_name` | product name | <1, 32> B | ASCII string | mandatory | A human-readable product name that provides a simple string containing identification of the product for the application and the Matter stack purposes. 
| +| `date` | manufacturing date | <8, 10> B | ISO 8601 | mandatory | A manufacturing date specifies the date that the device was manufactured. The date format used is ISO 8601, for example `YYYY-MM-DD`. | +| `hw_ver` | hardware version | 2 B | uint16 | mandatory | A hardware version number that specifies the version number of the hardware of the device. The value meaning and the versioning scheme is defined by the vendor. | +| `hw_ver_str` | hardware version string | <1, 64> B | uint16 | mandatory | A hardware version string parameter that specifies the version of the hardware of the device as a more user-friendly value than that presented by the hardware version integer value. The value meaning and the versioning scheme is defined by the vendor. | +| `rd_uid` | rotating device ID unique ID | <16, 32> B | byte string | mandatory | The unique ID for rotating device ID, which consists of a randomly-generated 128-bit (or longer) octet string. This parameter should be protected against reading or writing over-the-air after initial introduction into the device, and stay fixed during the lifetime of the device. | +| `dac_cert` | (DAC) Device Attestation Certificate | <1, 602> B | byte string | mandatory | The Device Attestation Certificate (DAC) and the corresponding private key are unique to each Matter device. The DAC is used for the Device Attestation process and to perform commissioning into a fabric. The DAC is a DER-encoded X.509v3-compliant certificate, as defined in RFC 5280. | +| `dac_key` | DAC private key | 68 B | byte string | mandatory | The private key associated with the Device Attestation Certificate (DAC). This key should be encrypted and maximum security should be guaranteed while generating and providing it to factory data. | +| `pai_cert` | Product Attestation Intermediate | <1, 602> B | byte string | mandatory | An intermediate certificate is an X.509 certificate, which has been signed by the root certificate. 
The last intermediate certificate in a chain is used to sign the leaf (the Matter device) certificate. The PAI is a DER-encoded X.509v3-compliant certificate as defined in RFC 5280. | | +| `spake2_it` | SPAKE2+ iteration counter | 4 B | uint32 | mandatory | A SPAKE2+ iteration counter is the amount of PBKDF2 (a key derivation function) interactions in a cryptographic process used during SPAKE2+ Verifier generation. | +| `spake2_salt` | SPAKE2+ salt | <32, 64> B | byte string | mandatory | The SPAKE2+ salt is a random piece of data, at least 32 byte long. It is used as an additional input to a one-way function that performs the cryptographic operations. A new salt should be randomly generated for each password. | +| `spake2_verifier` | SPAKE2+ verifier | 97 B | byte string | mandatory | The SPAKE2+ verifier generated using SPAKE2+ salt, iteration counter, and passcode. | +| `discriminator` | Discriminator | 2 B | uint16 | mandatory | A 12-bit value matching the field of the same name in the setup code. The discriminator is used during the discovery process. | +| `passcode` | SPAKE passcode | 4 B | uint32 | optional | A pairing passcode is a 27-bit unsigned integer which serves as a proof of possession during the commissioning. Its value must be restricted to the values from `0x0000001` to `0x5F5E0FE` (`00000001` to `99999998` in decimal), excluding the following invalid passcode values: `00000000`, `11111111`, `22222222`, `33333333`, `44444444`, `55555555`, `66666666`, `77777777`, `88888888`, `99999999`, `12345678`, `87654321`. | +| `user` | User data | variable | JSON string | max 1024 B | The user data is provided in the JSON format. This parameter is optional and depends on user's or manufacturer's purpose (or both). It is provided as a string from persistent storage and should be parsed in the user application. This data is not used by the Matter stack. 
| + +### Factory data format + +The factory data set must be saved into a HEX file that can be written to the +flash memory of the Matter device. + +In the nRF Connect example, the factory data set is represented in the CBOR +format and is stored in a HEX file. The file is then programmed to a device. The +JSON format is used as an intermediate, human-readable representation of the +data. The format is regulated by the +[JSON Schema](https://github.com/project-chip/connectedhomeip/blob/master/scripts/tools/nrfconnect/nrfconnect_factory_data.schema) +file. + +All parameters of the factory data set are either mandatory or optional: + +- Mandatory parameters must always be provided, as they are required for + example to perform commissioning to the Matter network. +- Optional parameters can be used for development and testing purposes. For + example, the `user` data parameter consists of all data that is needed by a + specific manufacturer and that is not included in the mandatory parameters. + +In the factory data set, the following formats are used: + +- uint16 and uint32 -- These are the numeric formats representing, + respectively, two-bytes length unsigned integer and four-bytes length + unsigned integer. This value is stored in a HEX file in the big-endian + order. +- Byte string - This parameter represents the sequence of integers between `0` + and `255`(inclusive), without any encoding. Because the JSON format does not + allow to use of byte strings, the `hex:` prefix is added to a parameter, and + its representation is converted to a HEX string. For example, an ASCII + string _`abba`_ is represented as _`hex:61626261`_ in the JSON file and then + stored in the HEX file as `0x61626261`. The HEX string length in the JSON + file is two times greater than the byte string plus the size of the prefix. +- ASCII string is a string representation in ASCII encoding without + null-terminating. 
+- ISO 8601 format is a + [date format](https://www.iso.org/iso-8601-date-and-time-format.html) that + represents a date provided in the `YYYY-MM-DD` or `YYYYMMDD` format. +- All certificates stored in factory data are provided in the + [X.509](https://www.itu.int/rec/T-REC-X.509-201910-I/en) format. + +
+ + +## Enabling factory data support + +By default, the factory data support is disabled in all nRF Connect examples and +the nRF Connect device uses predefined parameters from the Matter core, which +you should not change. To start using factory data stored in the flash memory +and the **Factory Data Provider** from the nRF Connect platform, build an +example with the following option (replace __ with your board +name, for example, `nrf52840dk_nrf52840`): + +``` +$ west build -b -- -DCONFIG_CHIP_FACTORY_DATA=y +``` + +## Generating factory data + +This section describes generating factory data using the following nRF Connect +Python scripts: + +- The first script creates a JSON file that contains a user-friendly + representation of the factory data. +- The second script uses the JSON file to create a factory data partition and + save it to a HEX file. + +After these operations, you will program a HEX file containing factory data +partition into the device's flash memory. + +You can use the second script without invoking the first one by providing a JSON +file written in another way. To make sure that the JSON file is correct and the +device is able to read out parameters, verify the file using the +[JSON schema](#verifying-using-a-json-schema). + +### Creating factory data JSON file with the first script + +A Matter device needs a proper factory data partition stored in the flash memory +to read out all required parameters during startup. To simplify the factory data +generation, you can use the +[generate_nrfconnect_chip_factory_data.py](../../scripts/tools/nrfconnect/generate_nrfconnect_chip_factory_data.py) +Python script to provide all required parameters and generate a human-readable +JSON file. + +To use this script, complete the following steps: + +1. Navigate to the `connectedhomeip` root directory. + +2. 
Run the script with `-h` option to see all possible options: + +``` +$ python scripts/tools/nrfconnect/generate_nrfconnect_chip_factory_data.py -h +``` + +3. Prepare a list of arguments: + + a. Fill up all mandatory arguments: + + ``` + --sn --vendor_id, --product_id, --vendor_name, --product_name, --date, --hw_ver, --hw_ver_str, --spake2_it, --spake2_salt, --discriminator + ``` + + b. Add output file path: + + ``` + -o + ``` + + c. Generate SPAKE2 verifier using one of the following methods: + + - Automatic: + + ``` + --passcode --spake2p_path + ``` + + - Manual: + + ``` + --spake2_verifier + ``` + + d. Add paths to `.der` files that contain PAI and DAC certificates and the + DAC private key (replace the respective variables with the file names): + + ``` + --dac_cert .der --dac_key .der --pai_cert .der + ``` + + e. (optional) Add the new unique ID for rotating device ID using one of the + following options: + + - Provide an existing ID: + + ``` + --rd_uid + ``` + + - Generate a new ID and provide it (): + + ``` + --generate_rd_uid + --rd_uid + ``` + + You can find a newly generated unique ID in the console output. + + f. (optional) Add the JSON schema to verify the JSON file (replace the + respective variable with the file path): + + ``` + --schema + ``` + + g. (optional) Add a request to include a pairing passcode in the JSON file: + + ``` + --include_passcode + ``` + + h. (optional) Add the request to overwrite existing the JSON file: + + ``` + --overwrite + ``` + +4. 
Run the script using the prepared list of arguments: + +``` +$ python generate_nrfconnect_chip_factory_data.py +``` + +For example, a final invocation of the Python script can look similar to the +following one: + +``` +$ python scripts/tools/nrfconnect/generate_nrfconnect_chip_factory_data.py \ +--sn "11223344556677889900" \ +--vendor_id 65521 \ +--product_id 32774 \ +--vendor_name "Nordic Semiconductor ASA" \ +--product_name "not-specified" \ +--date "2022-02-02" \ +--hw_ver 1 \ +--hw_ver_str "prerelase" \ +--dac_cert "credentials/development/attestation/Matter-Development-DAC-8006-Cert.der" \ +--dac_key "credentials/development/attestation/Matter-Development-DAC-8006-Key.der" \ +--pai_cert "credentials/development/attestation/Matter-Development-PAI-noPID-Cert.der" \ +--spake2_it 1000 \ +--spake2_salt "U1BBS0UyUCBLZXkgU2FsdA==" \ +--discriminator 0xF00 \ +--generate_rd_uid \ +--passcode 20202021 \ +--spake2p_path "src/tools/spake2p/out/spake2p" \ +--out "build.json" \ +--schema "scripts/tools/nrfconnect/nrfconnect_factory_data.schema" +``` + +As the result of the above example, a unique ID for the rotating device ID is +created, SPAKE2+ verifier is generated using the `spake2p` executable, and the +JSON file is verified using the prepared JSON Schema. + +If the script finishes successfully, go to the location you provided with the +`-o` argument. Use the JSON file you find there when +[generating the factory data partition](#generating_factory_data_partition). + +> Note: Generating the SPAKE2+ verifier is optional and requires providing a +> path to the `spake2p` executable. To get it, complete the following steps: +> +> 1. Navigate to the `connectedhomeip` root directory. +> 2. In a terminal, run the command: +> `cd src/tools/spake2p && gn gen out && ninja -C out spake2p` to build the +> executable. +> 3. Add the `connectedhomeip/src/tools/spake2p/out/spake2p` path as an +> argument of `--spake2p_path` for the Python script. 
+ +> Note: By default, overwriting the existing JSON file is disabled. This means +> that you cannot create a new JSON file with the same name in the exact +> location as an existing file. To allow overwriting, add the `--overwrite` +> option to the argument list of the Python script. + +### Verifying using the JSON Schema tool + +The JSON file that contains factory data can be verified using the +[JSON Schema file](https://github.com/project-chip/connectedhomeip/blob/master/scripts/tools/nrfconnect/nrfconnect_factory_data.schema). +You can use one of three options to validate the structure and contents of the +JSON data. + +#### Option 1: Using the php-json-schema tool + +To check the JSON file using a JSON Schema verification tool manually on a Linux +machine, complete the following steps: + +1. Install the `php-json-schema` package: + +``` +$ sudo apt install php-json-schema +``` + +2. Run the following command, with __ and + __ replaced with the paths to the JSON file and the + Schema file, respectively: + +``` +$ validate-json +``` + +The tool returns empty output in case of success. + +#### Option 2: Using a website validator + +You can also use external websites instead of the `php-json-schema` tool to +verify a factory data JSON file. For example, go to the +[JSON Schema Validator website](https://www.jsonschemavalidator.net/), +copy-paste the content of the +[JSON Schema file](https://github.com/project-chip/connectedhomeip/blob/master/scripts/tools/nrfconnect/nrfconnect_factory_data.schema) +to the first window and a JSON file to the second one. A message under the +window indicates the validation status. + +#### Option 3: Using the nRF Connect Python script + +You can have the JSON file checked automatically by the Python script during the +file generation. 
For this to happen, provide the path to the JSON schema file as +an additional argument, which should replace the __ variable in +the following command: + +``` +$ python generate_nrfconnect_chip_factory_data.py --schema +``` + +> Note: To learn more about the JSON schema, visit +> [this unofficial JSON Schema tool usage website](https://json-schema.org/understanding-json-schema/). + +### Preparing factory data partition on a device + +The factory data partition is an area in the device's persistent storage where a +factory data set is stored. This area is configured using the +[Partition Manager](https://developer.nordicsemi.com/nRF_Connect_SDK/doc/latest/nrf/scripts/partition_manager/partition_manager.html), +within which all partitions are declared in the `pm_static.yml` file. + +To prepare an example that supports factory data, add a partition called +`factory_data` to the `pm_static.yml` file. The partition size should be a +multiple of one flash page (for nRF52 and nRF53 SoCs, a single page size equals +4 kB). + +See the following code snippet for an example of a factory data partition in the +`pm_static.yml` file. The snippet is based on the `pm_static.yml` file from the +[Lock application example](../../examples/lock-app/nrfconnect/configuration/nrf52840dk_nrf52840/pm_static_dfu.yml) +and uses the nRF52840 DK: + +``` +... +mcuboot_primary_app: + orig_span: &id002 + - app + span: *id002 + address: 0x7200 + size: 0xf3e00 + +factory_data: + address: 0xfb00 + size: 0x1000 + region: flash_primary + +settings_storage: + address: 0xfc000 + size: 0x4000 + region: flash_primary +... +``` + +In this example, a `factory_data` partition has been placed between the +application partition (`mcuboot_primary_app`) and the settings storage. Its size +has been set to one flash page (4 kB). + +Use Partition Manager's report tool to ensure you created a factory data +partition correctly. 
To do that, navigate to the example directory and run the +following command: + +``` +$ west build -t partition_manager_report +``` + +The output will look similar to the following one: + +``` + + external_flash (0x800000 - 8192kB): ++---------------------------------------------+ +| 0x0: mcuboot_secondary (0xf4000 - 976kB) | +| 0xf4000: external_flash (0x70c000 - 7216kB) | ++---------------------------------------------+ + + flash_primary (0x100000 - 1024kB): ++-------------------------------------------------+ +| 0x0: mcuboot (0x7000 - 28kB) | ++---0x7000: mcuboot_primary (0xf4000 - 976kB)-----+ +| 0x7000: mcuboot_pad (0x200 - 512B) | ++---0x7200: mcuboot_primary_app (0xf3e00 - 975kB)-+ +| 0x7200: app (0xf3e00 - 975kB) | ++-------------------------------------------------+ +| 0xfb000: factory_data (0x1000 - 4kB) | +| 0xfc000: settings_storage (0x4000 - 16kB) | ++-------------------------------------------------+ + + sram_primary (0x40000 - 256kB): ++--------------------------------------------+ +| 0x20000000: sram_primary (0x40000 - 256kB) | ++--------------------------------------------+ + +``` + +### Creating a factory data partition with the second script + +To store the factory data set in the device's persistent storage, convert the +data from the JSON file to its binary representation in the CBOR format. To do +this, use the +[nrfconnect_generate_partition.py](../../scripts/tools/nrfconnect/nrfconnect_generate_partition.py) +to generate the factory data partition: + +1. Navigate to the _connectedhomeip_ root directory +2. Run the following command pattern: + +``` +$ python scripts/tools/nrfconnect/nrfconnect_generate_partition.py -i -o --offset --size +``` + +In this command: + +- __ is a path to the JSON file containing appropriate + factory data. +- __ is a path to an output file without any prefix. For + example, providing `/build/output` as an argument will result in creating + `/build/output.hex` and `/build/output.bin`. 
+- __ is an address in the device's persistent + storage area where a partition data set is to be stored. +- __ is a size of partition in the device's persistent storage + area. New data is checked according to this value of the JSON data to see if + it fits the size. + +To see the optional arguments for the script, use the following command: + +``` +$ python scripts/tools/nrfconnect/nrfconnect_generate_partition.py -h +``` + +**Example of the command for the nRF52840 DK:** + +``` +$ python scripts/tools/nrfconnect/nrfconnect_generate_partition.py -i build/zephyr/factory_data.json -o build/zephyr/factory_data --offset 0xfb000 --size 0x1000 +``` + +As a result, `factory_data.hex` and `factory_data.bin` files are created in the +`/build/zephyr/` directory. The first file contains the memory offset. For this +reason, it can be programmed directly to the device using a programmer (for +example, `nrfjprog`). + +
+ + +## Building an example with factory data + +You can manually generate the factory data set using the instructions described +in the [Generating factory data](#generating-factory-data) section. Another way +is to use the nRF Connect platform build system that creates factory data +content automatically using Kconfig options and includes the content in the +final firmware binary. + +To enable generating the factory data set automatically, go to the example's +directory and build the example with the following option (replace +`nrf52840dk_nrf52840` with your board name): + +``` +$ west build -b nrf52840dk_nrf52840 -- -DCONFIG_CHIP_FACTORY_DATA=y -DCONFIG_CHIP_FACTORY_DATA_BUILD=y +``` + +Alternatively, you can also add `CONFIG_CHIP_FACTORY_DATA_BUILD=y` Kconfig +setting to the example's `prj.conf` file. + +Each factory data parameter has a default value. These are described in the +[Kconfig file](../../config/nrfconnect/chip-module/Kconfig). Setting a new value +for the factory data parameter can be done either by providing it as a build +argument list or by using interactive Kconfig interfaces. + +### Providing factory data parameters as a build argument list + +This way for providing factory data can be used with third-party build script, +as it uses only one command. All parameters can be edited manually by providing +them as an additional option for the west command. For example (replace +`nrf52840dk_nrf52840` with own board name): + +``` +$ west build -b nrf52840dk_nrf52840 -- -DCONFIG_CHIP_FACTORY_DATA=y --DCONFIG_CHIP_FACTORY_DATA_BUILD=y --DCONFIG_CHIP_DEVICE_DISCRIMINATOR=0xF11 +``` + +Alternatively, you can add the relevant Kconfig option lines to the example's +`prj.conf` file. + +### Setting factory data parameters using interactive Kconfig interfaces + +You can edit all configuration options using the interactive Kconfig interface. 
+ +See the +[Configuring nRF Connect examples](../guides/nrfconnect_examples_configuration.md) +page for information about how to configure Kconfig options. + +In the configuration window, expand the items +`Modules -> connectedhomeip (/home/arbl/matter/connectedhomeip/config/nrfconnect/chip-module) -> Connected Home over IP protocol stack`. +You will see all factory data configuration options, as in the following +snippet: + +``` +(65521) Device vendor ID +(32774) Device product ID +[*] Enable Factory Data build +[*] Enable merging generated factory data with the build tar +[*] Use default certificates located in Matter repository +[ ] Enable SPAKE2 verifier generation +[*] Enable generation of a new Rotating device id unique id +(11223344556677889900) Serial number of device +(Nordic Semiconductor ASA) Human-readable vendor name +(not-specified) Human-readable product name +(2022-01-01) Manufacturing date in ISO 8601 +(0) Integer representation of hardware version +(prerelease) user-friendly string representation of hardware ver +(0xF00) Device pairing discriminator +(20202021) SPAKE2+ passcode +(1000) SPAKE2+ iteration count +(U1BBS0UyUCBLZXkgU2FsdA==) SPAKE2+ salt in string format +(uWFwqugDNGiEck/po7KHwwMwwqZgN10XuyBajPGuyzUEV/iree4lOrao5GuwnlQ +(91a9c12a7c80700a31ddcfa7fce63e44) A rotating device id unique i +``` + +> Note: To get more information about how to use the interactive Kconfig +> interfaces, read the +> [Kconfig documentation](https://developer.nordicsemi.com/nRF_Connect_SDK/doc/latest/zephyr/build/kconfig/menuconfig.html). + +
+ + +## Programming factory data + +The HEX file containing factory data can be programmed into the device's flash +memory using `nrfjprog` and the J-Link programmer. To do this, use the following +command: + +``` +$ nrfjprog --program factory_data.hex +``` + +In this command, you can add the `--family` argument and provide the name of the +DK: `NRF52` for the nRF52840 DK or `NRF53` for the nRF5340 DK. For example: + +``` +$ nrfjprog --family NRF52 --program factory_data.hex +``` + +> Note: For more information about how to use the `nrfjprog` utility, visit +> [Nordic Semiconductor's Infocenter](https://infocenter.nordicsemi.com/index.jsp?topic=%2Fug_nrf_cltools%2FUG%2Fcltools%2Fnrf_nrfjprogexe.html). + +Another way to program the factory data to a device is to use the nRF Connect +platform build system described in +[Building an example with factory data](#building-an-example-with-factory-data), +and build an example with the additional option +`-DCONFIG_CHIP_FACTORY_DATA_MERGE_WITH_FIRMWARE=y`: + +``` +$ west build -b nrf52840dk_nrf52840 -- \ +-DCONFIG_CHIP_FACTORY_DATA=y \ +-DCONFIG_CHIP_FACTORY_DATA_BUILD=y \ +-DCONFIG_CHIP_FACTORY_DATA_MERGE_WITH_FIRMWARE=y +``` + +After that, use the following command from the example's directory to write +firmware and newly generated factory data at the same time: + +``` +$ west flash +``` + +
+ + +## Using own factory data implementation + +The [factory data generation process](#generating-factory-data) described above +is only an example valid for the nRF Connect platform. You can also create a HEX +file containing all [factory data components](#factory-data-components) in any +format and then implement a parser to read out all parameters and pass them to a +provider. Each manufacturer can implement a factory data set on its own by +implementing a parser and a factory data accessor inside the Matter stack. Use +the [nRF Connect Provider](../../src/platform/nrfconnect/FactoryDataProvider.h) +and [FactoryDataParser](../../src/platform/nrfconnect/FactoryDataParser.h) as +examples. + +You can read the factory data set from the device's flash memory in different +ways, depending on the purpose and the format. In the nRF Connect example, the +factory data is stored in the CBOR format. The device uses the +[Factory Data Parser](../../src/platform/nrfconnect/FactoryDataParser.h) to read +out raw data, decode it, and store it in the `FactoryData` structure. The +[Factory Data Provider](../../src/platform/nrfconnect/FactoryDataProvider.c) +implementation uses this parser to get all needed factory data parameters and +provide them to the Matter core. + +In the nRF Connect example, the `FactoryDataProvider` is a template class that +inherits from `DeviceAttestationCredentialsProvider`, +`CommissionableDataProvider`, and `DeviceInstanceInfoProvider` classes. Your +custom implementation must also inherit from these classes and implement their +functions to get all factory data parameters from the device's flash memory. +These classes are virtual and need to be overridden by the derived class. To +override the inherited classes, complete the following steps: + +1. 
Override the following methods: + +``` + // ===== Members functions that implement the DeviceAttestationCredentialsProvider + CHIP_ERROR GetCertificationDeclaration(MutableByteSpan & outBuffer) override; + CHIP_ERROR GetFirmwareInformation(MutableByteSpan & out_firmware_info_buffer) override; + CHIP_ERROR GetDeviceAttestationCert(MutableByteSpan & outBuffer) override; + CHIP_ERROR GetProductAttestationIntermediateCert(MutableByteSpan & outBuffer) override; + CHIP_ERROR SignWithDeviceAttestationKey(const ByteSpan & messageToSign, MutableByteSpan & outSignBuffer) override; + + // ===== Members functions that implement the CommissionableDataProvider + CHIP_ERROR GetSetupDiscriminator(uint16_t & setupDiscriminator) override; + CHIP_ERROR SetSetupDiscriminator(uint16_t setupDiscriminator) override; + CHIP_ERROR GetSpake2pIterationCount(uint32_t & iterationCount) override; + CHIP_ERROR GetSpake2pSalt(MutableByteSpan & saltBuf) override; + CHIP_ERROR GetSpake2pVerifier(MutableByteSpan & verifierBuf, size_t & verifierLen) override; + CHIP_ERROR GetSetupPasscode(uint32_t & setupPasscode) override; + CHIP_ERROR SetSetupPasscode(uint32_t setupPasscode) override; + + // ===== Members functions that implement the DeviceInstanceInfoProvider + CHIP_ERROR GetVendorName(char * buf, size_t bufSize) override; + CHIP_ERROR GetVendorId(uint16_t & vendorId) override; + CHIP_ERROR GetProductName(char * buf, size_t bufSize) override; + CHIP_ERROR GetProductId(uint16_t & productId) override; + CHIP_ERROR GetSerialNumber(char * buf, size_t bufSize) override; + CHIP_ERROR GetManufacturingDate(uint16_t & year, uint8_t & month, uint8_t & day) override; + CHIP_ERROR GetHardwareVersion(uint16_t & hardwareVersion) override; + CHIP_ERROR GetHardwareVersionString(char * buf, size_t bufSize) override; + CHIP_ERROR GetRotatingDeviceIdUniqueId(MutableByteSpan & uniqueIdSpan) override; +``` + +2. Move the newly created parser and provider files to your project directory. +3. 
Add the files to the `CMakeLists.txt` file. +4. Disable building both the default and the nRF Connect implementations of + factory data providers to start using your own implementation of factory data + parser and provider. This can be done in one of the following ways: + +- Add `CONFIG_FACTORY_DATA_CUSTOM_BACKEND=y` Kconfig setting to `prj.conf` + file. +- Build an example with the following option (replace __ with your + board name, for example `nrf52840dk_nrf52840`): + +``` + $ west build -b -- -DCONFIG_FACTORY_DATA_CUSTOM_BACKEND=y +``` diff --git a/examples/all-clusters-app/cc13x2x7_26x2x7/args.gni b/examples/all-clusters-app/cc13x2x7_26x2x7/args.gni index bd6e49f7871b4f..8f67c7f5edd0c7 100644 --- a/examples/all-clusters-app/cc13x2x7_26x2x7/args.gni +++ b/examples/all-clusters-app/cc13x2x7_26x2x7/args.gni @@ -33,6 +33,8 @@ chip_openthread_ftd = false # Disable CHIP Logging chip_progress_logging = false + +# Disable verbose logs for all-clusters app to save Flash chip_detail_logging = false chip_automation_logging = false diff --git a/examples/all-clusters-app/cc13x2x7_26x2x7/main/AppTask.cpp b/examples/all-clusters-app/cc13x2x7_26x2x7/main/AppTask.cpp index f99dd873e2fd60..abb73838493316 100644 --- a/examples/all-clusters-app/cc13x2x7_26x2x7/main/AppTask.cpp +++ b/examples/all-clusters-app/cc13x2x7_26x2x7/main/AppTask.cpp @@ -30,6 +30,13 @@ #include +#if CHIP_DEVICE_CONFIG_ENABLE_OTA_REQUESTOR +#include +#include +#include +#include +#include +#endif #include #include @@ -62,6 +69,26 @@ AppTask AppTask::sAppTask; constexpr EndpointId kNetworkCommissioningEndpointSecondary = 0xFFFE; +#if CHIP_DEVICE_CONFIG_ENABLE_OTA_REQUESTOR +static DefaultOTARequestor sRequestorCore; +static DefaultOTARequestorStorage sRequestorStorage; +static DefaultOTARequestorDriver sRequestorUser; +static BDXDownloader sDownloader; +static OTAImageProcessorImpl sImageProcessor; + +void InitializeOTARequestor(void) +{ + // Initialize and interconnect the Requestor and Image Processor objects
+ SetRequestorInstance(&sRequestorCore); + + sRequestorStorage.Init(Server::GetInstance().GetPersistentStorage()); + sRequestorCore.Init(Server::GetInstance(), sRequestorStorage, sRequestorUser, sDownloader); + sImageProcessor.SetOTADownloader(&sDownloader); + sDownloader.SetImageProcessorDelegate(&sImageProcessor); + sRequestorUser.Init(&sRequestorCore, &sImageProcessor); +} +#endif + #ifdef AUTO_PRINT_METRICS static void printMetrics(void) { @@ -238,6 +265,9 @@ int AppTask::Init() // this function will happen on the CHIP event loop thread, not the app_main thread. PlatformMgr().AddEventHandler(DeviceEventCallback, reinterpret_cast(nullptr)); +#if CHIP_DEVICE_CONFIG_ENABLE_OTA_REQUESTOR + InitializeOTARequestor(); +#endif // QR code will be used with CHIP Tool PrintOnboardingCodes(RendezvousInformationFlags(RendezvousInformationFlag::kBLE)); diff --git a/examples/all-clusters-app/esp32/main/include/ShellCommands.h b/examples/all-clusters-app/esp32/main/include/ShellCommands.h index aea17cca138523..3e4e8536cee962 100644 --- a/examples/all-clusters-app/esp32/main/include/ShellCommands.h +++ b/examples/all-clusters-app/esp32/main/include/ShellCommands.h @@ -125,10 +125,10 @@ class CASECommands // Register the CASESession commands void Register(); - void SetFabricInfo(FabricInfo * fabricInfo) { mFabricInfo = fabricInfo; } + void SetFabricInfo(const FabricInfo * fabricInfo) { mFabricInfo = fabricInfo; } void SetNodeId(NodeId nodeId) { mNodeId = nodeId; } void SetOnConnecting(bool onConnecting) { mOnConnecting = onConnecting; } - FabricInfo * GetFabricInfo(void) { return mFabricInfo; } + const FabricInfo * GetFabricInfo(void) { return mFabricInfo; } NodeId GetNodeId(void) { return mNodeId; } bool GetOnConnecting(void) { return mOnConnecting; } @@ -167,7 +167,7 @@ class CASECommands return CHIP_ERROR_INCORRECT_STATE; } const FabricIndex fabricIndex = static_cast(strtoul(argv[0], nullptr, 10)); - FabricInfo * fabricInfo = 
Server::GetInstance().GetFabricTable().FindFabricWithIndex(fabricIndex); + const FabricInfo * fabricInfo = Server::GetInstance().GetFabricTable().FindFabricWithIndex(fabricIndex); if (fabricInfo == nullptr) { @@ -201,9 +201,9 @@ class CASECommands static Callback::Callback sOnConnectedCallback; static Callback::Callback sOnConnectionFailureCallback; static Shell::Engine sSubShell; - FabricInfo * mFabricInfo = nullptr; - NodeId mNodeId = 0; - bool mOnConnecting = false; + const FabricInfo * mFabricInfo = nullptr; + NodeId mNodeId = 0; + bool mOnConnecting = false; }; } // namespace Shell diff --git a/examples/all-clusters-app/nxp/mw320/README.md b/examples/all-clusters-app/nxp/mw320/README.md index 14699b15a29a48..af1c09621398d8 100755 --- a/examples/all-clusters-app/nxp/mw320/README.md +++ b/examples/all-clusters-app/nxp/mw320/README.md @@ -49,6 +49,16 @@ Note: 2. "source third_party/connectedhomeip/scripts/activate.sh" can be omitted if your environment is already setup without issues. +Tinycrypt ECC operations: + +Note: This solution is temporary. 
+ +In order to use the tinycrypt ecc operations, use the following build arguments: + +``` +$ gn gen out/debug --args='treat_warnings_as_errors=false mbedtls_repo="//third_party/connectedhomeip/third_party/nxp/libs/mbedtls" mbedtls_use_tinycrypt=true' +``` + ## Flashing diff --git a/examples/all-clusters-app/nxp/mw320/main.cpp b/examples/all-clusters-app/nxp/mw320/main.cpp index bf7f1fc454fd4a..52fbb40964aeff 100644 --- a/examples/all-clusters-app/nxp/mw320/main.cpp +++ b/examples/all-clusters-app/nxp/mw320/main.cpp @@ -1411,8 +1411,7 @@ static void OnSwitchAttributeChangeCallback(EndpointId endpointId, AttributeId a /* Callback to receive the cluster modification event */ -void MatterPostAttributeChangeCallback(const chip::app::ConcreteAttributePath & path, uint8_t mask, uint8_t type, uint16_t size, - uint8_t * value) +void MatterPostAttributeChangeCallback(const chip::app::ConcreteAttributePath & path, uint8_t type, uint16_t size, uint8_t * value) { PRINTF("==> MatterPostAttributeChangeCallback, cluster: %x, attr: %x, size: %d \r\n", path.mClusterId, path.mAttributeId, size); // path.mEndpointId, path.mClusterId, path.mAttributeId, mask, type, size, value diff --git a/examples/all-clusters-app/p6/include/AppTask.h b/examples/all-clusters-app/p6/include/AppTask.h index 5bb9a9163225bf..499de504d53ee6 100644 --- a/examples/all-clusters-app/p6/include/AppTask.h +++ b/examples/all-clusters-app/p6/include/AppTask.h @@ -43,7 +43,7 @@ class AppTask public: CHIP_ERROR StartAppTask(); static void AppTaskMain(void * pvParameter); - static void LightActionEventHandler(AppEvent * aEvent); + static void LightActionEventHandler(AppEvent * event); void ButtonEventHandler(uint8_t btnIdx, uint8_t btnAction); void PostEvent(const AppEvent * event); void InitOTARequestor(); diff --git a/examples/all-clusters-app/p6/src/AppTask.cpp b/examples/all-clusters-app/p6/src/AppTask.cpp index c8717c30740357..4b99db7e7f38d3 100644 --- a/examples/all-clusters-app/p6/src/AppTask.cpp +++ 
b/examples/all-clusters-app/p6/src/AppTask.cpp @@ -220,7 +220,7 @@ void AppTask::AppTaskMain(void * pvParameter) } } -void AppTask::LightActionEventHandler(AppEvent * aEvent) +void AppTask::LightActionEventHandler(AppEvent * event) { /* ON/OFF Light Led based on Button interrupt */ sLightLED.Invert(); @@ -248,7 +248,7 @@ void AppTask::ButtonEventHandler(uint8_t btnIdx, uint8_t btnAction) } } -void AppTask::PostEvent(const AppEvent * aEvent) +void AppTask::PostEvent(const AppEvent * event) { if (sAppEventQueue != NULL) { @@ -256,7 +256,7 @@ void AppTask::PostEvent(const AppEvent * aEvent) if (xPortIsInsideInterrupt()) { BaseType_t higherPrioTaskWoken = pdFALSE; - status = xQueueSendFromISR(sAppEventQueue, aEvent, &higherPrioTaskWoken); + status = xQueueSendFromISR(sAppEventQueue, event, &higherPrioTaskWoken); #ifdef portYIELD_FROM_ISR portYIELD_FROM_ISR(higherPrioTaskWoken); @@ -268,7 +268,7 @@ void AppTask::PostEvent(const AppEvent * aEvent) } else { - status = xQueueSend(sAppEventQueue, aEvent, 1); + status = xQueueSend(sAppEventQueue, event, 1); } if (!status) @@ -280,11 +280,11 @@ void AppTask::PostEvent(const AppEvent * aEvent) } } -void AppTask::DispatchEvent(AppEvent * aEvent) +void AppTask::DispatchEvent(AppEvent * event) { - if (aEvent->Handler) + if (event->Handler) { - aEvent->Handler(aEvent); + event->Handler(event); } else { diff --git a/examples/all-clusters-minimal-app/cc13x2x7_26x2x7/args.gni b/examples/all-clusters-minimal-app/cc13x2x7_26x2x7/args.gni index e7438af247a9c2..8f67c7f5edd0c7 100644 --- a/examples/all-clusters-minimal-app/cc13x2x7_26x2x7/args.gni +++ b/examples/all-clusters-minimal-app/cc13x2x7_26x2x7/args.gni @@ -32,7 +32,9 @@ chip_enable_ota_requestor = false chip_openthread_ftd = false # Disable CHIP Logging -#chip_progress_logging = false +chip_progress_logging = false + +# Disable verbose logs for all-clusters app to save Flash chip_detail_logging = false chip_automation_logging = false diff --git 
a/examples/all-clusters-minimal-app/cc13x2x7_26x2x7/main/AppTask.cpp b/examples/all-clusters-minimal-app/cc13x2x7_26x2x7/main/AppTask.cpp index f99dd873e2fd60..abb73838493316 100644 --- a/examples/all-clusters-minimal-app/cc13x2x7_26x2x7/main/AppTask.cpp +++ b/examples/all-clusters-minimal-app/cc13x2x7_26x2x7/main/AppTask.cpp @@ -30,6 +30,13 @@ #include +#if CHIP_DEVICE_CONFIG_ENABLE_OTA_REQUESTOR +#include +#include +#include +#include +#include +#endif #include #include @@ -62,6 +69,26 @@ AppTask AppTask::sAppTask; constexpr EndpointId kNetworkCommissioningEndpointSecondary = 0xFFFE; +#if CHIP_DEVICE_CONFIG_ENABLE_OTA_REQUESTOR +static DefaultOTARequestor sRequestorCore; +static DefaultOTARequestorStorage sRequestorStorage; +static DefaultOTARequestorDriver sRequestorUser; +static BDXDownloader sDownloader; +static OTAImageProcessorImpl sImageProcessor; + +void InitializeOTARequestor(void) +{ + // Initialize and interconnect the Requestor and Image Processor objects + SetRequestorInstance(&sRequestorCore); + + sRequestorStorage.Init(Server::GetInstance().GetPersistentStorage()); + sRequestorCore.Init(Server::GetInstance(), sRequestorStorage, sRequestorUser, sDownloader); + sImageProcessor.SetOTADownloader(&sDownloader); + sDownloader.SetImageProcessorDelegate(&sImageProcessor); + sRequestorUser.Init(&sRequestorCore, &sImageProcessor); +} +#endif + #ifdef AUTO_PRINT_METRICS static void printMetrics(void) { @@ -238,6 +265,9 @@ int AppTask::Init() // this function will happen on the CHIP event loop thread, not the app_main thread. 
PlatformMgr().AddEventHandler(DeviceEventCallback, reinterpret_cast(nullptr)); +#if CHIP_DEVICE_CONFIG_ENABLE_OTA_REQUESTOR + InitializeOTARequestor(); +#endif // QR code will be used with CHIP Tool PrintOnboardingCodes(RendezvousInformationFlags(RendezvousInformationFlag::kBLE)); diff --git a/examples/all-clusters-minimal-app/esp32/main/include/ShellCommands.h b/examples/all-clusters-minimal-app/esp32/main/include/ShellCommands.h index aea17cca138523..3e4e8536cee962 100644 --- a/examples/all-clusters-minimal-app/esp32/main/include/ShellCommands.h +++ b/examples/all-clusters-minimal-app/esp32/main/include/ShellCommands.h @@ -125,10 +125,10 @@ class CASECommands // Register the CASESession commands void Register(); - void SetFabricInfo(FabricInfo * fabricInfo) { mFabricInfo = fabricInfo; } + void SetFabricInfo(const FabricInfo * fabricInfo) { mFabricInfo = fabricInfo; } void SetNodeId(NodeId nodeId) { mNodeId = nodeId; } void SetOnConnecting(bool onConnecting) { mOnConnecting = onConnecting; } - FabricInfo * GetFabricInfo(void) { return mFabricInfo; } + const FabricInfo * GetFabricInfo(void) { return mFabricInfo; } NodeId GetNodeId(void) { return mNodeId; } bool GetOnConnecting(void) { return mOnConnecting; } @@ -167,7 +167,7 @@ class CASECommands return CHIP_ERROR_INCORRECT_STATE; } const FabricIndex fabricIndex = static_cast(strtoul(argv[0], nullptr, 10)); - FabricInfo * fabricInfo = Server::GetInstance().GetFabricTable().FindFabricWithIndex(fabricIndex); + const FabricInfo * fabricInfo = Server::GetInstance().GetFabricTable().FindFabricWithIndex(fabricIndex); if (fabricInfo == nullptr) { @@ -201,9 +201,9 @@ class CASECommands static Callback::Callback sOnConnectedCallback; static Callback::Callback sOnConnectionFailureCallback; static Shell::Engine sSubShell; - FabricInfo * mFabricInfo = nullptr; - NodeId mNodeId = 0; - bool mOnConnecting = false; + const FabricInfo * mFabricInfo = nullptr; + NodeId mNodeId = 0; + bool mOnConnecting = false; }; } // namespace 
Shell diff --git a/examples/chef/chef.py b/examples/chef/chef.py index 9de079a43a3108..fb075f27a49ff9 100755 --- a/examples/chef/chef.py +++ b/examples/chef/chef.py @@ -150,10 +150,7 @@ def bundle(platform: str, device_name: str) -> None: flush_print(f"No bundle function for {platform}!") exit(1) flush_print(f"Copying {matter_file}") - src_item = os.path.join(_REPO_BASE_PATH, - "zzz_generated", - "chef-"+device_name, - "zap-generated", + src_item = os.path.join(_DEVICE_FOLDER, matter_file) dest_item = os.path.join(_CD_STAGING_DIR, matter_file) shutil.copy(src_item, dest_item) @@ -322,8 +319,15 @@ def main(argv: Sequence[str]) -> None: dest="use_zzz", action="store_true") parser.add_option("", "--build_all", help="For use in CD only. Builds and bundles all chef examples for the specified platform. Uses --use_zzz. Chef exits after completion.", dest="build_all", action="store_true") + parser.add_option("-k", "--keep_going", help="For use in CD only. Continues building all sample apps in the event of an error.", + dest="keep_going", action="store_true") parser.add_option( "", "--ci", help="Builds Chef examples defined in cicd_config. Uses --use_zzz. Uses specified target from -t. Chef exits after completion.", dest="ci", action="store_true") + parser.add_option( + "", "--ipv6only", help="Compile build which only supports ipv6. Linux only.", + action="store_true") + parser.add_option( + "", "--cpu_type", help="CPU type to compile for. 
Linux only.", choices=["arm64", "x64"]) options, _ = parser.parse_args(argv) @@ -387,13 +391,17 @@ def main(argv: Sequence[str]) -> None: except RuntimeError as build_fail_error: failed_builds.append((device_name, platform, "build")) flush_print(str(build_fail_error)) - break + if not options.keep_going: + exit(1) + continue try: bundle(platform, device_name) except FileNotFoundError as bundle_fail_error: failed_builds.append((device_name, platform, "bundle")) flush_print(str(bundle_fail_error)) - break + if not options.keep_going: + exit(1) + continue archive_name = f"{label}-{device_name}" archive_full_name = archive_prefix + archive_name + archive_suffix flush_print(f"Adding build output to archive {archive_full_name}") @@ -593,17 +601,48 @@ def main(argv: Sequence[str]) -> None: elif options.build_target == "linux": shell.run_cmd(f"cd {_CHEF_SCRIPT_PATH}/linux") + + linux_args = [] + if options.do_rpc: + linux_args.append('import("//with_pw_rpc.gni")') + linux_args.extend([ + 'import("//build_overrides/chip.gni")', + 'import("${chip_root}/config/standalone/args.gni")', + 'chip_shell_cmd_server = false', + 'chip_build_libshell = true', + 'chip_config_network_layer_ble = false', + f'target_defines = ["CHIP_DEVICE_CONFIG_DEVICE_VENDOR_ID={options.vid}", "CHIP_DEVICE_CONFIG_DEVICE_PRODUCT_ID={options.pid}", "CONFIG_ENABLE_PW_RPC={int(options.do_rpc)}"]', + ]) + if options.cpu_type == "arm64": + uname_resp = shell.run_cmd("uname -m", return_cmd_output=True) + if "aarch" not in uname_resp and "arm" not in uname_resp: + if ( + "aarch" not in uname_resp and + "arm" not in uname_resp and + "SYSROOT_AARCH64" not in shell.env): + flush_print( + "SYSROOT_AARCH64 env variable not set. 
" + "AARCH64 toolchain needed for cross-compiling for arm64.") + exit(1) + shell.env["PKG_CONFIG_PATH"] = ( + f'{shell.env["SYSROOT_AARCH64"]}/lib/aarch64-linux-gnu/pkgconfig') + linux_args.append('target_cpu="arm64"') + linux_args.append('is_clang=true') + linux_args.append('chip_crypto="mbedtls"') + linux_args.append(f'sysroot="{shell.env["SYSROOT_AARCH64"]}"') + elif options.cpu_type == "x64": + uname_resp = shell.run_cmd("uname -m", return_cmd_output=True) + if "x64" not in uname_resp and "x86_64" not in uname_resp: + flush_print(f"Unable to cross compile for x64 on {uname_resp}") + exit(1) + if options.ipv6only: + linux_args.append("chip_inet_config_enable_ipv4=false") + + if sw_ver_string: + linux_args.append( + f'chip_device_config_device_software_version_string = "{sw_ver_string}"') with open(f"{_CHEF_SCRIPT_PATH}/linux/args.gni", "w") as f: - sw_ver_string_config_text = f"chip_device_config_device_software_version_string = \"{sw_ver_string}\"" if sw_ver_string else "" - f.write(textwrap.dedent(f"""\ - import("//build_overrides/chip.gni") - import("${{chip_root}}/config/standalone/args.gni") - chip_shell_cmd_server = false - chip_build_libshell = true - chip_config_network_layer_ble = false - target_defines = ["CHIP_DEVICE_CONFIG_DEVICE_VENDOR_ID={options.vid}", "CHIP_DEVICE_CONFIG_DEVICE_PRODUCT_ID={options.pid}", "CONFIG_ENABLE_PW_RPC={'1' if options.do_rpc else '0'}"] - {sw_ver_string_config_text} - """)) + f.write("\n".join(linux_args)) with open(f"{_CHEF_SCRIPT_PATH}/linux/sample.gni", "w") as f: f.write(textwrap.dedent(f"""\ sample_zap_file = "{options.sample_device_type_name}.zap" @@ -611,11 +650,7 @@ def main(argv: Sequence[str]) -> None: """)) if options.do_clean: shell.run_cmd(f"rm -rf out") - if options.do_rpc: - shell.run_cmd( - "gn gen out --args='import(\"//with_pw_rpc.gni\")'") - else: - shell.run_cmd("gn gen out --args=''") + shell.run_cmd("gn gen out") shell.run_cmd("ninja -C out") # diff --git 
a/zzz_generated/chef-rootnode_contactsensor_lFAGG1bfRO/zap-generated/rootnode_contactsensor_lFAGG1bfRO.matter b/examples/chef/devices/rootnode_contactsensor_lFAGG1bfRO.matter similarity index 100% rename from zzz_generated/chef-rootnode_contactsensor_lFAGG1bfRO/zap-generated/rootnode_contactsensor_lFAGG1bfRO.matter rename to examples/chef/devices/rootnode_contactsensor_lFAGG1bfRO.matter diff --git a/zzz_generated/chef-rootnode_dimmablelight_bCwGYSDpoe/zap-generated/rootnode_dimmablelight_bCwGYSDpoe.matter b/examples/chef/devices/rootnode_dimmablelight_bCwGYSDpoe.matter similarity index 100% rename from zzz_generated/chef-rootnode_dimmablelight_bCwGYSDpoe/zap-generated/rootnode_dimmablelight_bCwGYSDpoe.matter rename to examples/chef/devices/rootnode_dimmablelight_bCwGYSDpoe.matter diff --git a/zzz_generated/chef-rootnode_flowsensor_1zVxHedlaV/zap-generated/rootnode_flowsensor_1zVxHedlaV.matter b/examples/chef/devices/rootnode_flowsensor_1zVxHedlaV.matter similarity index 100% rename from zzz_generated/chef-rootnode_flowsensor_1zVxHedlaV/zap-generated/rootnode_flowsensor_1zVxHedlaV.matter rename to examples/chef/devices/rootnode_flowsensor_1zVxHedlaV.matter diff --git a/zzz_generated/chef-rootnode_heatingcoolingunit_ncdGai1E5a/zap-generated/rootnode_heatingcoolingunit_ncdGai1E5a.matter b/examples/chef/devices/rootnode_heatingcoolingunit_ncdGai1E5a.matter similarity index 100% rename from zzz_generated/chef-rootnode_heatingcoolingunit_ncdGai1E5a/zap-generated/rootnode_heatingcoolingunit_ncdGai1E5a.matter rename to examples/chef/devices/rootnode_heatingcoolingunit_ncdGai1E5a.matter diff --git a/zzz_generated/chef-rootnode_humiditysensor_Xyj4gda6Hb/zap-generated/rootnode_humiditysensor_Xyj4gda6Hb.matter b/examples/chef/devices/rootnode_humiditysensor_Xyj4gda6Hb.matter similarity index 100% rename from zzz_generated/chef-rootnode_humiditysensor_Xyj4gda6Hb/zap-generated/rootnode_humiditysensor_Xyj4gda6Hb.matter rename to 
examples/chef/devices/rootnode_humiditysensor_Xyj4gda6Hb.matter diff --git a/zzz_generated/chef-rootnode_occupancysensor_iHyVgifZuo/zap-generated/rootnode_occupancysensor_iHyVgifZuo.matter b/examples/chef/devices/rootnode_occupancysensor_iHyVgifZuo.matter similarity index 100% rename from zzz_generated/chef-rootnode_occupancysensor_iHyVgifZuo/zap-generated/rootnode_occupancysensor_iHyVgifZuo.matter rename to examples/chef/devices/rootnode_occupancysensor_iHyVgifZuo.matter diff --git a/zzz_generated/chef-rootnode_onofflightswitch_FsPlMr090Q/zap-generated/rootnode_onofflightswitch_FsPlMr090Q.matter b/examples/chef/devices/rootnode_onofflightswitch_FsPlMr090Q.matter similarity index 100% rename from zzz_generated/chef-rootnode_onofflightswitch_FsPlMr090Q/zap-generated/rootnode_onofflightswitch_FsPlMr090Q.matter rename to examples/chef/devices/rootnode_onofflightswitch_FsPlMr090Q.matter diff --git a/zzz_generated/chef-rootnode_onoffpluginunit_Wtf8ss5EBY/zap-generated/rootnode_onoffpluginunit_Wtf8ss5EBY.matter b/examples/chef/devices/rootnode_onoffpluginunit_Wtf8ss5EBY.matter similarity index 100% rename from zzz_generated/chef-rootnode_onoffpluginunit_Wtf8ss5EBY/zap-generated/rootnode_onoffpluginunit_Wtf8ss5EBY.matter rename to examples/chef/devices/rootnode_onoffpluginunit_Wtf8ss5EBY.matter diff --git a/zzz_generated/chef-rootnode_pressuresensor_s0qC9wLH4k/zap-generated/rootnode_pressuresensor_s0qC9wLH4k.matter b/examples/chef/devices/rootnode_pressuresensor_s0qC9wLH4k.matter similarity index 100% rename from zzz_generated/chef-rootnode_pressuresensor_s0qC9wLH4k/zap-generated/rootnode_pressuresensor_s0qC9wLH4k.matter rename to examples/chef/devices/rootnode_pressuresensor_s0qC9wLH4k.matter diff --git a/zzz_generated/chef-rootnode_speaker_RpzeXdimqA/zap-generated/rootnode_speaker_RpzeXdimqA.matter b/examples/chef/devices/rootnode_speaker_RpzeXdimqA.matter similarity index 100% rename from 
zzz_generated/chef-rootnode_speaker_RpzeXdimqA/zap-generated/rootnode_speaker_RpzeXdimqA.matter rename to examples/chef/devices/rootnode_speaker_RpzeXdimqA.matter diff --git a/zzz_generated/chef-rootnode_temperaturesensor_Qy1zkNW7c3/zap-generated/rootnode_temperaturesensor_Qy1zkNW7c3.matter b/examples/chef/devices/rootnode_temperaturesensor_Qy1zkNW7c3.matter similarity index 100% rename from zzz_generated/chef-rootnode_temperaturesensor_Qy1zkNW7c3/zap-generated/rootnode_temperaturesensor_Qy1zkNW7c3.matter rename to examples/chef/devices/rootnode_temperaturesensor_Qy1zkNW7c3.matter diff --git a/zzz_generated/chef-rootnode_thermostat_bm3fb8dhYi/zap-generated/rootnode_thermostat_bm3fb8dhYi.matter b/examples/chef/devices/rootnode_thermostat_bm3fb8dhYi.matter similarity index 100% rename from zzz_generated/chef-rootnode_thermostat_bm3fb8dhYi/zap-generated/rootnode_thermostat_bm3fb8dhYi.matter rename to examples/chef/devices/rootnode_thermostat_bm3fb8dhYi.matter diff --git a/zzz_generated/chef-rootnode_windowcovering_RLCxaGi9Yx/zap-generated/rootnode_windowcovering_RLCxaGi9Yx.matter b/examples/chef/devices/rootnode_windowcovering_RLCxaGi9Yx.matter similarity index 100% rename from zzz_generated/chef-rootnode_windowcovering_RLCxaGi9Yx/zap-generated/rootnode_windowcovering_RLCxaGi9Yx.matter rename to examples/chef/devices/rootnode_windowcovering_RLCxaGi9Yx.matter diff --git a/examples/chef/linux/with_pw_rpc.gni b/examples/chef/linux/with_pw_rpc.gni index 9533565ecf4ce8..756a93469bf072 100644 --- a/examples/chef/linux/with_pw_rpc.gni +++ b/examples/chef/linux/with_pw_rpc.gni @@ -32,6 +32,7 @@ pw_rpc_system_server_BACKEND = "${chip_root}/config/linux/lib/pw_rpc:pw_rpc" dir_pw_third_party_nanopb = "${chip_root}/third_party/nanopb/repo" pw_chrono_SYSTEM_CLOCK_BACKEND = "$dir_pw_chrono_stl:system_clock" pw_sync_MUTEX_BACKEND = "$dir_pw_sync_stl:mutex_backend" +pw_span_ENABLE_STD_SPAN_POLYFILL = false pw_build_LINK_DEPS = [ "$dir_pw_assert:impl", @@ -40,3 +41,4 @@ 
pw_build_LINK_DEPS = [ chip_enable_pw_rpc = true chip_build_pw_trace_lib = true +chip_use_pw_logging = true diff --git a/examples/chip-tool/commands/clusters/ModelCommand.cpp b/examples/chip-tool/commands/clusters/ModelCommand.cpp index 9c06ed70b0edd1..fd255abfe204b0 100644 --- a/examples/chip-tool/commands/clusters/ModelCommand.cpp +++ b/examples/chip-tool/commands/clusters/ModelCommand.cpp @@ -66,7 +66,8 @@ void ModelCommand::OnDeviceConnectionFailureFn(void * context, PeerId peerId, CH void ModelCommand::Shutdown() { - ResetArguments(); mOnDeviceConnectedCallback.Cancel(); mOnDeviceConnectionFailureCallback.Cancel(); + + CHIPCommand::Shutdown(); } diff --git a/examples/chip-tool/commands/common/CHIPCommand.h b/examples/chip-tool/commands/common/CHIPCommand.h index e4cebf6095cc35..7688a34873bfbe 100644 --- a/examples/chip-tool/commands/common/CHIPCommand.h +++ b/examples/chip-tool/commands/common/CHIPCommand.h @@ -101,7 +101,7 @@ class CHIPCommand : public Command // Shut down the command. After a Shutdown call the command object is ready // to be used for another command invocation. - virtual void Shutdown() {} + virtual void Shutdown() { ResetArguments(); } // Clean up any resources allocated by the command. Some commands may hold // on to resources after Shutdown(), but Cleanup() will guarantee those are diff --git a/examples/chip-tool/commands/common/Command.cpp b/examples/chip-tool/commands/common/Command.cpp index 1931d88e39752f..59812076cfde75 100644 --- a/examples/chip-tool/commands/common/Command.cpp +++ b/examples/chip-tool/commands/common/Command.cpp @@ -355,7 +355,7 @@ bool Command::InitArgument(size_t argIndex, char * argValue) isValidArgument = HandleNullableOptional(arg, argValue, [&](auto * value) { // We support two ways to pass an octet string argument. If it happens // to be all-ASCII, you can just pass it in. Otherwise you can pass in - // 0x followed by the hex-encoded bytes. + // "hex:" followed by the hex-encoded bytes. 
size_t argLen = strlen(argValue); static constexpr char hexPrefix[] = "hex:"; constexpr size_t prefixLen = ArraySize(hexPrefix) - 1; // Don't count the null @@ -854,44 +854,154 @@ size_t Command::AddArgumentToList(Argument && argument) return 0; } +namespace { +template +void ResetOptionalArg(const Argument & arg) +{ + VerifyOrDie(arg.isOptional()); + + if (arg.isNullable()) + { + reinterpret_cast> *>(arg.value)->ClearValue(); + } + else + { + reinterpret_cast *>(arg.value)->ClearValue(); + } +} +} // anonymous namespace + void Command::ResetArguments() { for (size_t i = 0; i < mArgs.size(); i++) { const Argument arg = mArgs[i]; const ArgumentType type = arg.type; - const uint8_t flags = arg.flags; - if (type == ArgumentType::VectorBool && flags == Argument::kOptional) + if (arg.isOptional()) { - auto vectorArgument = static_cast *>(arg.value); - vectorArgument->clear(); - } - else if (type == ArgumentType::Vector16 && flags != Argument::kOptional) - { - auto vectorArgument = static_cast *>(arg.value); - vectorArgument->clear(); - } - else if (type == ArgumentType::Vector32 && flags != Argument::kOptional) - { - auto vectorArgument = static_cast *>(arg.value); - vectorArgument->clear(); - } - else if (type == ArgumentType::Vector32 && flags == Argument::kOptional) - { - auto optionalArgument = static_cast> *>(arg.value); - if (optionalArgument->HasValue()) + // Must always clean these up so they don't carry over to the next + // command invocation in interactive mode. + switch (type) { - optionalArgument->Value().clear(); + case ArgumentType::Complex: { + // No optional complex arguments so far. + VerifyOrDie(false); + break; + } + case ArgumentType::Custom: { + // No optional custom arguments so far. + VerifyOrDie(false); + break; + } + case ArgumentType::VectorBool: { + auto vectorArgument = static_cast *>(arg.value); + vectorArgument->clear(); + break; + } + case ArgumentType::Vector16: { + // No optional Vector16 arguments so far. 
+ VerifyOrDie(false); + break; + } + case ArgumentType::Vector32: { + ResetOptionalArg>(arg); + break; + } + case ArgumentType::VectorCustom: { + // No optional VectorCustom arguments so far. + VerifyOrDie(false); + break; + } + case ArgumentType::Attribute: { + // No optional Attribute arguments so far. + VerifyOrDie(false); + break; + } + case ArgumentType::String: { + ResetOptionalArg(arg); + break; + } + case ArgumentType::CharString: { + ResetOptionalArg(arg); + break; + } + case ArgumentType::OctetString: { + ResetOptionalArg(arg); + break; + } + case ArgumentType::Bool: { + ResetOptionalArg(arg); + break; + } + case ArgumentType::Number_uint8: { + ResetOptionalArg(arg); + break; + } + case ArgumentType::Number_uint16: { + ResetOptionalArg(arg); + break; + } + case ArgumentType::Number_uint32: { + ResetOptionalArg(arg); + break; + } + case ArgumentType::Number_uint64: { + ResetOptionalArg(arg); + break; + } + case ArgumentType::Number_int8: { + ResetOptionalArg(arg); + break; + } + case ArgumentType::Number_int16: { + ResetOptionalArg(arg); + break; + } + case ArgumentType::Number_int32: { + ResetOptionalArg(arg); + break; + } + case ArgumentType::Number_int64: { + ResetOptionalArg(arg); + break; + } + case ArgumentType::Float: { + ResetOptionalArg(arg); + break; + } + case ArgumentType::Double: { + ResetOptionalArg(arg); + break; + } + case ArgumentType::Address: { + ResetOptionalArg(arg); + break; + } } } - else if (type == ArgumentType::VectorCustom && flags != Argument::kOptional) + else { - auto vectorArgument = static_cast *>(arg.value); - for (auto & customArgument : *vectorArgument) + // Some non-optional arguments have state that needs to be cleaned + // up too. 
+ if (type == ArgumentType::Vector16) { - delete customArgument; + auto vectorArgument = static_cast *>(arg.value); + vectorArgument->clear(); + } + else if (type == ArgumentType::Vector32) + { + auto vectorArgument = static_cast *>(arg.value); + vectorArgument->clear(); + } + else if (type == ArgumentType::VectorCustom) + { + auto vectorArgument = static_cast *>(arg.value); + for (auto & customArgument : *vectorArgument) + { + delete customArgument; + } + vectorArgument->clear(); } - vectorArgument->clear(); } } } diff --git a/examples/chip-tool/commands/discover/DiscoverCommissionersCommand.cpp b/examples/chip-tool/commands/discover/DiscoverCommissionersCommand.cpp index 3cb3cf6153f129..b2fe353931328a 100644 --- a/examples/chip-tool/commands/discover/DiscoverCommissionersCommand.cpp +++ b/examples/chip-tool/commands/discover/DiscoverCommissionersCommand.cpp @@ -42,4 +42,6 @@ void DiscoverCommissionersCommand::Shutdown() ChipLogProgress(chipTool, "Total of %d commissioner(s) discovered in %u sec", commissionerCount, std::chrono::duration_cast(GetWaitDuration()).count()); + + CHIPCommand::Shutdown(); } diff --git a/examples/chip-tool/commands/payload/SetupPayloadGenerateCommand.cpp b/examples/chip-tool/commands/payload/SetupPayloadGenerateCommand.cpp index a9f18be49cd1e6..74abe2a0d58d16 100644 --- a/examples/chip-tool/commands/payload/SetupPayloadGenerateCommand.cpp +++ b/examples/chip-tool/commands/payload/SetupPayloadGenerateCommand.cpp @@ -17,6 +17,7 @@ */ #include "SetupPayloadGenerateCommand.h" +#include #include #include #include @@ -62,9 +63,14 @@ CHIP_ERROR SetupPayloadGenerateQRCodeCommand::Run() { SetupPayload payload; - if (mPayload.HasValue()) + if (mExistingPayload.HasValue()) { - QRCodeSetupPayloadParser(mPayload.Value()).populatePayload(payload); + CHIP_ERROR err = QRCodeSetupPayloadParser(mExistingPayload.Value()).populatePayload(payload); + if (err != CHIP_NO_ERROR) + { + ChipLogError(chipTool, "Invalid existing payload: %" CHIP_ERROR_FORMAT, 
err.Format()); + return err; + } } ConfigurePayload(payload); @@ -74,23 +80,135 @@ CHIP_ERROR SetupPayloadGenerateQRCodeCommand::Run() payload.rendezvousInformation.SetRaw(mRendezvous.Value()); } + if (mTLVBytes.HasValue()) + { + CHIP_ERROR err = PopulatePayloadTLVFromBytes(payload, mTLVBytes.Value()); + if (err != CHIP_NO_ERROR) + { + ChipLogError(chipTool, "Unable to populate payload TLV: %" CHIP_ERROR_FORMAT, err.Format()); + return err; + } + } + QRCodeSetupPayloadGenerator generator(payload); generator.SetAllowInvalidPayload(mAllowInvalidPayload.ValueOr(false)); std::string code; - ReturnErrorOnFailure(generator.payloadBase38Representation(code)); + ReturnErrorOnFailure(generator.payloadBase38RepresentationWithAutoTLVBuffer(code)); ChipLogProgress(chipTool, "QR Code: %s", code.c_str()); return CHIP_NO_ERROR; } +CHIP_ERROR SetupPayloadGenerateQRCodeCommand::PopulatePayloadTLVFromBytes(SetupPayload & payload, const ByteSpan & tlvBytes) +{ + // First clear out all the existing TVL bits from the payload. Ignore + // errors here, because we don't care if those bits are not present. + payload.removeSerialNumber(); + + auto existingVendorData = payload.getAllOptionalVendorData(); + for (auto & data : existingVendorData) + { + payload.removeOptionalVendorData(data.tag); + } + + if (tlvBytes.empty()) + { + // Used to just clear out the existing TLV. + return CHIP_NO_ERROR; + } + + TLV::TLVReader reader; + reader.Init(tlvBytes); + + // Data is a TLV structure. 
+ ReturnErrorOnFailure(reader.Next(TLV::kTLVType_Structure, TLV::AnonymousTag())); + + TLV::TLVType outerType; + ReturnErrorOnFailure(reader.EnterContainer(outerType)); + + CHIP_ERROR err; + while ((err = reader.Next()) == CHIP_NO_ERROR) + { + TLV::Tag tag = reader.GetTag(); + if (!TLV::IsContextTag(tag)) + { + ChipLogError(chipTool, "Unexpected non-context TLV tag."); + return CHIP_ERROR_INVALID_TLV_TAG; + } + + uint8_t tagNum = static_cast(TLV::TagNumFromTag(tag)); + if (tagNum < 0x80) + { + // Matter-common tag. + if (tagNum != kSerialNumberTag) + { + ChipLogError(chipTool, "No support yet for Matter-common tags other than serial number"); + return CHIP_ERROR_UNSUPPORTED_CHIP_FEATURE; + } + + // Serial number can be a string or an unsigned integer. + if (reader.GetType() == TLV::kTLVType_UTF8String) + { + CharSpan data; + ReturnErrorOnFailure(reader.Get(data)); + ReturnErrorOnFailure(payload.addSerialNumber(std::string(data.data(), data.size()))); + continue; + } + + if (reader.GetType() == TLV::kTLVType_UnsignedInteger) + { + uint32_t value; + ReturnErrorOnFailure(reader.Get(value)); + ReturnErrorOnFailure(payload.addSerialNumber(value)); + continue; + } + + ChipLogError(chipTool, "Unexpected type for serial number: %d", to_underlying(reader.GetType())); + return CHIP_ERROR_WRONG_TLV_TYPE; + } + + // Vendor tag. We support strings and signed integers. 
+ if (reader.GetType() == TLV::kTLVType_UTF8String) + { + CharSpan data; + ReturnErrorOnFailure(reader.Get(data)); + ReturnErrorOnFailure(payload.addOptionalVendorData(tagNum, std::string(data.data(), data.size()))); + continue; + } + + if (reader.GetType() == TLV::kTLVType_SignedInteger) + { + int32_t value; + ReturnErrorOnFailure(reader.Get(value)); + ReturnErrorOnFailure(payload.addOptionalVendorData(tagNum, value)); + continue; + } + + ChipLogError(chipTool, "Unexpected type for vendor data: %d", to_underlying(reader.GetType())); + return CHIP_ERROR_WRONG_TLV_TYPE; + } + + VerifyOrReturnError(err == CHIP_END_OF_TLV, err); + + ReturnErrorOnFailure(reader.ExitContainer(outerType)); + ReturnErrorOnFailure(reader.VerifyEndOfContainer()); + + return CHIP_NO_ERROR; +} + CHIP_ERROR SetupPayloadGenerateManualCodeCommand::Run() { SetupPayload payload; - if (mPayload.HasValue()) + if (mExistingPayload.HasValue()) { - ManualSetupPayloadParser(mPayload.Value()).populatePayload(payload); + CHIP_ERROR err = ManualSetupPayloadParser(mExistingPayload.Value()).populatePayload(payload); + if (err != CHIP_NO_ERROR) + { + ChipLogError(chipTool, "Invalid existing payload: %" CHIP_ERROR_FORMAT, err.Format()); + return err; + } } ConfigurePayload(payload); diff --git a/examples/chip-tool/commands/payload/SetupPayloadGenerateCommand.h b/examples/chip-tool/commands/payload/SetupPayloadGenerateCommand.h index 9e0d13c7cef3b2..31a16e1d9940a5 100644 --- a/examples/chip-tool/commands/payload/SetupPayloadGenerateCommand.h +++ b/examples/chip-tool/commands/payload/SetupPayloadGenerateCommand.h @@ -26,7 +26,7 @@ class SetupPayloadGenerateCommand : public Command public: SetupPayloadGenerateCommand(const char * name) : Command(name) { - AddArgument("payload", &mPayload); + AddArgument("existing-payload", &mExistingPayload, "An existing setup payload to modify based on the other arguments."); AddArgument("discriminator", 0, UINT16_MAX, &mDiscriminator); AddArgument("setup-pin-code", 0, 
UINT32_MAX, &mSetUpPINCode); AddArgument("version", 0, UINT8_MAX, &mVersion); @@ -44,7 +44,7 @@ class SetupPayloadGenerateCommand : public Command chip::Optional mVersion; chip::Optional mVendorId; chip::Optional mProductId; - chip::Optional mPayload; + chip::Optional mExistingPayload; chip::Optional mCommissioningMode; chip::Optional mAllowInvalidPayload; }; @@ -55,11 +55,18 @@ class SetupPayloadGenerateQRCodeCommand : public SetupPayloadGenerateCommand SetupPayloadGenerateQRCodeCommand() : SetupPayloadGenerateCommand("generate-qrcode") { AddArgument("rendezvous", 0, UINT8_MAX, &mRendezvous); + AddArgument( + "tlvBytes", &mTLVBytes, + "Pre-encoded TLV for the optional part of the payload. A nonempty value should be passed as \"hex:\" followed by the " + "bytes in hex encoding. Passing an empty string to override the TLV in an existing payload is allowed."); } CHIP_ERROR Run() override; private: + static CHIP_ERROR PopulatePayloadTLVFromBytes(chip::SetupPayload & payload, const chip::ByteSpan & tlvBytes); + chip::Optional mRendezvous; + chip::Optional mTLVBytes; }; class SetupPayloadGenerateManualCodeCommand : public SetupPayloadGenerateCommand diff --git a/examples/chip-tool/config/PersistentStorage.cpp b/examples/chip-tool/config/PersistentStorage.cpp index c2bf34349bccab..bd3d1e4e96e843 100644 --- a/examples/chip-tool/config/PersistentStorage.cpp +++ b/examples/chip-tool/config/PersistentStorage.cpp @@ -50,6 +50,29 @@ std::string GetFilename(const char * name) namespace { +std::string EscapeKey(const std::string & key) +{ + std::string escapedKey; + escapedKey.reserve(key.size()); + + for (char c : key) + { + // Replace spaces, non-printable chars, `=` and the escape itself with hex-escaped (C-style) characters. 
+ if ((c <= 0x20) || (c == '=') || (c == '\\') || (c >= 0x7F)) + { + char escaped[5] = { 0 }; + snprintf(escaped, sizeof(escaped), "\\x%02x", (static_cast(c) & 0xff)); + escapedKey += escaped; + } + else + { + escapedKey += c; + } + } + + return escapedKey; +} + std::string StringToBase64(const std::string & value) { std::unique_ptr buffer(new char[BASE64_ENCODED_LEN(value.length())]); @@ -103,31 +126,43 @@ CHIP_ERROR PersistentStorage::SyncGetKeyValue(const char * key, void * value, ui { std::string iniValue; + ReturnErrorCodeIf(((value == nullptr) && (size != 0)), CHIP_ERROR_INVALID_ARGUMENT); + auto section = mConfig.sections[kDefaultSectionName]; - auto it = section.find(key); - ReturnErrorCodeIf(it == section.end(), CHIP_ERROR_PERSISTED_STORAGE_VALUE_NOT_FOUND); - ReturnErrorCodeIf(!inipp::extract(section[key], iniValue), CHIP_ERROR_INVALID_ARGUMENT); + ReturnErrorCodeIf(!SyncDoesKeyExist(key), CHIP_ERROR_PERSISTED_STORAGE_VALUE_NOT_FOUND); + + std::string escapedKey = EscapeKey(key); + ReturnErrorCodeIf(!inipp::extract(section[escapedKey], iniValue), CHIP_ERROR_INVALID_ARGUMENT); iniValue = Base64ToString(iniValue); uint16_t dataSize = static_cast(iniValue.size()); - if (dataSize > size) - { - size = dataSize; - return CHIP_ERROR_BUFFER_TOO_SMALL; - } + ReturnErrorCodeIf(size == 0 && dataSize == 0, CHIP_NO_ERROR); + ReturnErrorCodeIf(value == nullptr, CHIP_ERROR_BUFFER_TOO_SMALL); - size = dataSize; - memcpy(value, iniValue.data(), dataSize); + uint16_t sizeToCopy = std::min(size, dataSize); - return CHIP_NO_ERROR; + memcpy(value, iniValue.data(), sizeToCopy); + size = sizeToCopy; + return size < dataSize ? 
CHIP_ERROR_BUFFER_TOO_SMALL : CHIP_NO_ERROR; } CHIP_ERROR PersistentStorage::SyncSetKeyValue(const char * key, const void * value, uint16_t size) { + ReturnErrorCodeIf((value == nullptr) && (size != 0), CHIP_ERROR_INVALID_ARGUMENT); + auto section = mConfig.sections[kDefaultSectionName]; - section[key] = StringToBase64(std::string(static_cast(value), size)); + + std::string escapedKey = EscapeKey(key); + if (value == nullptr) + { + section[escapedKey] = ""; + } + else + { + section[escapedKey] = StringToBase64(std::string(static_cast(value), size)); + } mConfig.sections[kDefaultSectionName] = section; return CommitConfig(mName); @@ -136,15 +171,24 @@ CHIP_ERROR PersistentStorage::SyncSetKeyValue(const char * key, const void * val CHIP_ERROR PersistentStorage::SyncDeleteKeyValue(const char * key) { auto section = mConfig.sections[kDefaultSectionName]; - auto it = section.find(key); - ReturnErrorCodeIf(it == section.end(), CHIP_ERROR_PERSISTED_STORAGE_VALUE_NOT_FOUND); - section.erase(key); + ReturnErrorCodeIf(!SyncDoesKeyExist(key), CHIP_ERROR_PERSISTED_STORAGE_VALUE_NOT_FOUND); + + std::string escapedKey = EscapeKey(key); + section.erase(escapedKey); mConfig.sections[kDefaultSectionName] = section; return CommitConfig(mName); } +bool PersistentStorage::SyncDoesKeyExist(const char * key) +{ + std::string escapedKey = EscapeKey(key); + auto section = mConfig.sections[kDefaultSectionName]; + auto it = section.find(escapedKey); + return (it != section.end()); +} + CHIP_ERROR PersistentStorage::SyncClearAll() { ChipLogProgress(chipTool, "Clearing %s storage", kDefaultSectionName); diff --git a/examples/chip-tool/config/PersistentStorage.h b/examples/chip-tool/config/PersistentStorage.h index d26582ea9b9dfa..106dcd64949a87 100644 --- a/examples/chip-tool/config/PersistentStorage.h +++ b/examples/chip-tool/config/PersistentStorage.h @@ -32,6 +32,7 @@ class PersistentStorage : public chip::PersistentStorageDelegate CHIP_ERROR SyncGetKeyValue(const char * key, void * 
buffer, uint16_t & size) override; CHIP_ERROR SyncSetKeyValue(const char * key, const void * value, uint16_t size) override; CHIP_ERROR SyncDeleteKeyValue(const char * key) override; + bool SyncDoesKeyExist(const char * key) override; uint16_t GetListenPort(); chip::Logging::LogCategory GetLoggingLevel(); diff --git a/examples/common/pigweed/RpcService.cpp b/examples/common/pigweed/RpcService.cpp index 9616b61b49e4be..a60827236f6299 100644 --- a/examples/common/pigweed/RpcService.cpp +++ b/examples/common/pigweed/RpcService.cpp @@ -18,8 +18,8 @@ #include "RpcService.h" +#include "pw_span/span.h" #include -#include #include #include "pw_hdlc/rpc_channel.h" @@ -56,7 +56,7 @@ class ChipRpcChannelOutput : public pw::rpc::ChannelOutput pw::rpc::ChannelOutput(channel_name), mWriter(writer), mAddress(address) {} - pw::Status Send(std::span buffer) override + pw::Status Send(pw::span buffer) override { if (buffer.empty()) { @@ -102,7 +102,7 @@ void Start(void (*RegisterServices)(pw::rpc::Server &), ::chip::rpc::Mutex * uar { uart_mutex->Lock(); } - pw::hdlc::WriteUIFrame(1, std::as_bytes(std::span(log)), sysIoWriter); + pw::hdlc::WriteUIFrame(1, pw::as_bytes(pw::span(log)), sysIoWriter); if (uart_mutex) { uart_mutex->Unlock(); diff --git a/examples/common/pigweed/bouffalolab/bl602/PigweedLoggerMutex.cpp b/examples/common/pigweed/bouffalolab/bl602/PigweedLoggerMutex.cpp new file mode 100644 index 00000000000000..5061d53e768a6d --- /dev/null +++ b/examples/common/pigweed/bouffalolab/bl602/PigweedLoggerMutex.cpp @@ -0,0 +1,27 @@ +/* + * + * Copyright (c) 2021 Project CHIP Authors + * All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "PigweedLoggerMutex.h" + +namespace chip { +namespace rpc { + +PigweedLoggerMutex logger_mutex; + +} // namespace rpc +} // namespace chip diff --git a/examples/common/pigweed/bouffalolab/bl602/PigweedLoggerMutex.h b/examples/common/pigweed/bouffalolab/bl602/PigweedLoggerMutex.h new file mode 100644 index 00000000000000..4df8b616895e39 --- /dev/null +++ b/examples/common/pigweed/bouffalolab/bl602/PigweedLoggerMutex.h @@ -0,0 +1,54 @@ +/* + * + * Copyright (c) 2021 Project CHIP Authors + * All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include "PigweedLogger.h" +#include "pigweed/RpcService.h" +#include "semphr.h" +#include + +namespace chip { +namespace rpc { +class PigweedLoggerMutex : public ::chip::rpc::Mutex +{ + +public: + PigweedLoggerMutex() {} + void Lock() override + { + SemaphoreHandle_t * sem = PigweedLogger::GetSemaphore(); + if (sem) + { + xSemaphoreTake(*sem, portMAX_DELAY); + } + } + void Unlock() override + { + SemaphoreHandle_t * sem = PigweedLogger::GetSemaphore(); + if (sem) + { + xSemaphoreGive(*sem); + } + } +}; + +extern PigweedLoggerMutex logger_mutex; + +} // namespace rpc +} // namespace chip diff --git a/examples/common/pigweed/mbed/Rpc.cpp b/examples/common/pigweed/mbed/Rpc.cpp index e16c3afc816673..65e9d48824160a 100644 --- a/examples/common/pigweed/mbed/Rpc.cpp +++ b/examples/common/pigweed/mbed/Rpc.cpp @@ -123,10 +123,10 @@ Thread * Init() ChipLogProgress(NotSpecified, "RPC service starting...\r\n"); - auto error = rpcThread.start(RunRpcService); + long error = rpcThread.start(RunRpcService); if (error != osOK) { - ChipLogError(NotSpecified, "Run RPC service failed[%d]", error); + ChipLogError(NotSpecified, "Run RPC service failed[%ld]", error); return NULL; } diff --git a/examples/common/pigweed/rpc_console/py/chip_rpc/console.py b/examples/common/pigweed/rpc_console/py/chip_rpc/console.py index 65fdfc034f60ab..c74b6617396e59 100644 --- a/examples/common/pigweed/rpc_console/py/chip_rpc/console.py +++ b/examples/common/pigweed/rpc_console/py/chip_rpc/console.py @@ -253,7 +253,8 @@ def write_to_output(data: bytes, "E": logging.ERROR, "F": logging.FATAL, "V": logging.DEBUG, "D": logging.DEBUG, "": logging.INFO, "": logging.DEBUG, "": logging.ERROR, "": logging.INFO, "": logging.WARNING, - "": logging.ERROR, "": logging.DEBUG} + "": logging.ERROR, "": logging.DEBUG, + "ERR": logging.ERROR, "DBG": logging.DEBUG, "INF": logging.INFO} ESP_CHIP_REGEX = r"(?P[IWEFV]) \((?P