diff --git a/.github/workflows/examples-efr32.yaml b/.github/workflows/examples-efr32.yaml
index 2b73a924b26ee5..d98671cf85e350 100644
--- a/.github/workflows/examples-efr32.yaml
+++ b/.github/workflows/examples-efr32.yaml
@@ -30,6 +30,9 @@ jobs:
env:
EFR32_BOARD: BRD4161A
BUILD_TYPE: gn_efr32
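+ # Identify the PR number, head commit, and parent commit; these become part of the
+ # size-report artifact name (Size,<group>,<PR>,<commit>,<parent>) used by gh_report.py.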
+ GH_EVENT_PR: ${{ github.event_name == 'pull_request' && github.event.number || 0 }}
+ GH_EVENT_HASH: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
+ GH_EVENT_PARENT: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
runs-on: ubuntu-latest
if: github.actor != 'restyled-io[bot]'
@@ -57,25 +60,29 @@ jobs:
.environment/pigweed-venv/*.log
- name: Build example EFR32 Lock App for BRD4161A
timeout-minutes: 10
- run:
- scripts/examples/gn_efr32_example.sh examples/lock-app/efr32/
- out/lock_app_debug BRD4161A
+ run: |
+ scripts/examples/gn_efr32_example.sh examples/lock-app/efr32/ out/lock_app_debug BRD4161A
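+ # Write a JSON size report for this binary into /tmp/bloat_reports/ for later comparison.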
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py efr32 BRD4161A lock-app \
+ out/lock_app_debug/BRD4161A/chip-efr32-lock-example.out /tmp/bloat_reports/
- name: Build example EFR32 Lighting App for BRD4161A
timeout-minutes: 10
- run:
- scripts/examples/gn_efr32_example.sh
- examples/lighting-app/efr32/ out/lighting_app_debug BRD4161A
+ run: |
+ scripts/examples/gn_efr32_example.sh examples/lighting-app/efr32/ out/lighting_app_debug BRD4161A
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py efr32 BRD4161A lighting-app \
+ out/lighting_app_debug/BRD4161A/chip-efr32-lighting-example.out /tmp/bloat_reports/
- name: Build example EFR32 Lighting App for BRD4161A with RPCs
timeout-minutes: 10
- run:
- scripts/examples/gn_efr32_example.sh
- examples/lighting-app/efr32/ out/lighting_app_debug_rpc BRD4161A
- -args='import("//with_pw_rpc.gni")'
+ run: |
+ scripts/examples/gn_efr32_example.sh examples/lighting-app/efr32/ out/lighting_app_debug_rpc BRD4161A \
+ -args='import("//with_pw_rpc.gni")'
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py efr32 BRD4161A+rpc lighting-app \
+ out/lighting_app_debug_rpc/BRD4161A/chip-efr32-lighting-example.out /tmp/bloat_reports/
- name: Build example EFR32 Window Covering for BRD4161A
timeout-minutes: 10
- run:
- scripts/examples/gn_efr32_example.sh examples/window-app/efr32/
- out/window_app_debug BRD4161A
+ run: |
+ scripts/examples/gn_efr32_example.sh examples/window-app/efr32/ out/window_app_debug BRD4161A
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py efr32 BRD4161A window-app \
+ out/window_app_debug/BRD4161A/chip-efr32-window-example.out /tmp/bloat_reports/
- name: Binary artifact suffix
id: outsuffix
uses: haya14busa/action-cond@v1.0.0
@@ -94,3 +101,9 @@ jobs:
out/lock_app_debug/BRD4161A/chip-efr32-lock-example.out.map
out/lighting_app_debug_rpc/BRD4161A/chip-efr32-lighting-example.out
out/lighting_app_debug_rpc/BRD4161A/chip-efr32-lighting-example.out.map
+ - name: Uploading Size Reports
+ uses: actions/upload-artifact@v2
+ with:
+ name: Size,EFR32-Examples,${{ env.GH_EVENT_PR }},${{ env.GH_EVENT_HASH }},${{ env.GH_EVENT_PARENT }}
+ path: |
+ /tmp/bloat_reports/
diff --git a/.github/workflows/examples-esp32.yaml b/.github/workflows/examples-esp32.yaml
index 1cce1a171e9083..dfb4cf96f42243 100644
--- a/.github/workflows/examples-esp32.yaml
+++ b/.github/workflows/examples-esp32.yaml
@@ -30,6 +30,9 @@ jobs:
env:
BUILD_TYPE: esp32
+ GH_EVENT_PR: ${{ github.event_name == 'pull_request' && github.event.number || 0 }}
+ GH_EVENT_HASH: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
+ GH_EVENT_PARENT: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
runs-on: ubuntu-latest
if: github.actor != 'restyled-io[bot]'
@@ -64,6 +67,10 @@ jobs:
mkdir -p example_binaries/$BUILD_TYPE-build
cp examples/all-clusters-app/esp32/build/chip-all-clusters-app.elf \
example_binaries/$BUILD_TYPE-build/chip-all-clusters-app.elf
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ esp32 m5stack all-clusters-app \
+ example_binaries/$BUILD_TYPE-build/chip-all-clusters-app.elf \
+ /tmp/bloat_reports/
- name: Build example All Clusters App C3
timeout-minutes: 10
run: scripts/examples/esp_example.sh all-clusters-app sdkconfig_c3devkit.defaults
@@ -72,6 +79,10 @@ jobs:
mkdir -p example_binaries/$BUILD_TYPE-build
cp examples/all-clusters-app/esp32/build/chip-all-clusters-app.elf \
example_binaries/$BUILD_TYPE-build/chip-all-clusters-app.elf
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ esp32 c3devkit all-clusters-app \
+ example_binaries/$BUILD_TYPE-build/chip-all-clusters-app.elf \
+ /tmp/bloat_reports/
- name: Build example Pigweed App
timeout-minutes: 5
run: scripts/examples/esp_example.sh pigweed-app sdkconfig.defaults
@@ -80,6 +91,10 @@ jobs:
mkdir -p example_binaries/$BUILD_TYPE-build
cp examples/pigweed-app/esp32/build/chip-pigweed-app.elf \
example_binaries/$BUILD_TYPE-build/chip-pigweed-app.elf
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ esp32 default pigweed-app \
+ example_binaries/$BUILD_TYPE-build/chip-pigweed-app.elf \
+ /tmp/bloat_reports/
- name: Build example Lock App
timeout-minutes: 5
run: scripts/examples/esp_example.sh lock-app sdkconfig.defaults
@@ -88,6 +103,10 @@ jobs:
mkdir -p example_binaries/$BUILD_TYPE-build
cp examples/lock-app/esp32/build/chip-lock-app.elf \
example_binaries/$BUILD_TYPE-build/chip-lock-app.elf
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ esp32 default lock-app \
+ example_binaries/$BUILD_TYPE-build/chip-lock-app.elf \
+ /tmp/bloat_reports/
- name: Build example Bridge App
timeout-minutes: 5
run: scripts/examples/esp_example.sh bridge-app
@@ -96,6 +115,10 @@ jobs:
mkdir -p example_binaries/$BUILD_TYPE-build
cp examples/bridge-app/esp32/build/chip-bridge-app.elf \
example_binaries/$BUILD_TYPE-build/chip-bridge-app.elf
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ esp32 default bridge-app \
+ example_binaries/$BUILD_TYPE-build/chip-bridge-app.elf \
+ /tmp/bloat_reports/
- name: Build example Persistent Storage App
timeout-minutes: 5
run: scripts/examples/esp_example.sh persistent-storage sdkconfig.defaults
@@ -104,6 +127,10 @@ jobs:
mkdir -p example_binaries/$BUILD_TYPE-build
cp examples/persistent-storage/esp32/build/chip-persistent-storage.elf \
example_binaries/$BUILD_TYPE-build/chip-persistent-storage.elf
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ esp32 default persistent-storage \
+ example_binaries/$BUILD_TYPE-build/chip-persistent-storage.elf \
+ /tmp/bloat_reports/
- name: Build example Shell App
timeout-minutes: 5
run: scripts/examples/esp_example.sh shell sdkconfig.defaults
@@ -112,6 +139,10 @@ jobs:
mkdir -p example_binaries/$BUILD_TYPE-build
cp examples/shell/esp32/build/chip-shell.elf \
example_binaries/$BUILD_TYPE-build/chip-shell.elf
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ esp32 default shell \
+ example_binaries/$BUILD_TYPE-build/chip-shell.elf \
+ /tmp/bloat_reports/
- name: Build example Temperature Measurement App
timeout-minutes: 5
run: scripts/examples/esp_example.sh temperature-measurement-app sdkconfig.optimize.defaults
@@ -120,6 +151,10 @@ jobs:
mkdir -p example_binaries/$BUILD_TYPE-build
cp examples/temperature-measurement-app/esp32/build/chip-temperature-measurement-app.elf \
example_binaries/$BUILD_TYPE-build/chip-temperature-measurement-app.elf
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ esp32 optimize temperature-measurement-app \
+ example_binaries/$BUILD_TYPE-build/chip-temperature-measurement-app.elf \
+ /tmp/bloat_reports/
- name: Build example IPv6 Only App
timeout-minutes: 5
run: scripts/examples/esp_example.sh ipv6only-app sdkconfig.defaults
@@ -128,6 +163,10 @@ jobs:
mkdir -p example_binaries/$BUILD_TYPE-build
cp examples/ipv6only-app/esp32/build/chip-ipv6only-app.elf \
example_binaries/$BUILD_TYPE-build/chip-ipv6only-app.elf
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ esp32 default ipv6only-app \
+ example_binaries/$BUILD_TYPE-build/chip-ipv6only-app.elf \
+ /tmp/bloat_reports/
- name: Binary artifact suffix
id: outsuffix
uses: haya14busa/action-cond@v1.0.0
@@ -145,3 +184,8 @@ jobs:
${{ env.BUILD_TYPE }}-example-build-${{
steps.outsuffix.outputs.value }}
path: /tmp/output_binaries/${{ env.BUILD_TYPE }}-build
+ - name: Uploading Size Reports
+ uses: actions/upload-artifact@v2
+ with:
+ name: Size,ESP32-Examples,${{ env.GH_EVENT_PR }},${{ env.GH_EVENT_HASH }},${{ env.GH_EVENT_PARENT }}
+ path: /tmp/bloat_reports/
diff --git a/.github/workflows/examples-infineon.yaml b/.github/workflows/examples-infineon.yaml
index 196fc434169a77..033cd8ee7a0311 100644
--- a/.github/workflows/examples-infineon.yaml
+++ b/.github/workflows/examples-infineon.yaml
@@ -28,6 +28,11 @@ jobs:
name: Infineon examples building
timeout-minutes: 30
+ env:
+ GH_EVENT_PR: ${{ github.event_name == 'pull_request' && github.event.number || 0 }}
+ GH_EVENT_HASH: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
+ GH_EVENT_PARENT: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
+
runs-on: ubuntu-latest
if: github.actor != 'restyled-io[bot]'
@@ -55,4 +60,13 @@ jobs:
run: |
scripts/run_in_build_env.sh \
"scripts/build/build_examples.py --no-log-timestamps --platform infineon --app lock build"
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ p6 default lock-app \
+ out/infineon-p6board-lock/chip-p6-lock-example.out
+ - name: Uploading Size Reports
+ uses: actions/upload-artifact@v2
+ with:
+ name: Size,P6-Examples,${{ env.GH_EVENT_PR }},${{ env.GH_EVENT_HASH }},${{ env.GH_EVENT_PARENT }}
+ path: |
+ out/infineon-p6board-lock/p6-default-lock-app-sizes.json
diff --git a/.github/workflows/examples-k32w.yaml b/.github/workflows/examples-k32w.yaml
index 11058a5a61129d..6e337556fb9947 100644
--- a/.github/workflows/examples-k32w.yaml
+++ b/.github/workflows/examples-k32w.yaml
@@ -29,6 +29,9 @@ jobs:
env:
BUILD_TYPE: gn_k32w
+ GH_EVENT_PR: ${{ github.event_name == 'pull_request' && github.event.number || 0 }}
+ GH_EVENT_HASH: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
+ GH_EVENT_PARENT: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
runs-on: ubuntu-latest
if: github.actor != 'restyled-io[bot]'
@@ -56,16 +59,28 @@ jobs:
.environment/pigweed-venv/*.log
- name: Build example K32W Lock App
timeout-minutes: 5
- run: scripts/examples/k32w_example.sh
- examples/lock-app/k32w out/lock_app_debug
+ run: |
+ scripts/examples/k32w_example.sh examples/lock-app/k32w out/lock_app_debug
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ k32w k32w061+debug lock-app \
+ out/lock_app_debug/chip-k32w061-lock-example \
+ /tmp/bloat_reports/
- name: Build example K32W Shell App
timeout-minutes: 5
- run: scripts/examples/k32w_example.sh
- examples/shell/k32w out/shell_app_debug
+ run: |
+ scripts/examples/k32w_example.sh examples/shell/k32w out/shell_app_debug
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ k32w k32w061+debug shell \
+ out/shell_app_debug/chip-k32w061-shell-example \
+ /tmp/bloat_reports/
- name: Build example K32W Lighting App with Secure Element
timeout-minutes: 5
- run: scripts/examples/k32w_se_example.sh
- examples/lighting-app/k32w out/lighting_app_se_release
+ run: |
+ scripts/examples/k32w_se_example.sh examples/lighting-app/k32w out/lighting_app_se_release
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ k32w k32w061+se05x+release lighting-app \
+ out/lighting_app_se_release/chip-k32w061-light-example \
+ /tmp/bloat_reports/
- name: Binary artifact suffix
id: outsuffix
uses: haya14busa/action-cond@v1.0.0
@@ -82,3 +97,9 @@ jobs:
path: |
out/lock_app_debug/chip-k32w061-lock-example.out
out/lock_app_debug/chip-k32w061-lock-example.out.map
+ - name: Uploading Size Reports
+ uses: actions/upload-artifact@v2
+ with:
+ name: Size,K32W-Examples,${{ env.GH_EVENT_PR }},${{ env.GH_EVENT_HASH }},${{ env.GH_EVENT_PARENT }}
+ path: |
+ /tmp/bloat_reports/
diff --git a/.github/workflows/examples-linux-standalone.yaml b/.github/workflows/examples-linux-standalone.yaml
index ff751c8578dc31..1f77a89bbfd807 100644
--- a/.github/workflows/examples-linux-standalone.yaml
+++ b/.github/workflows/examples-linux-standalone.yaml
@@ -29,6 +29,9 @@ jobs:
env:
BUILD_TYPE: gn_linux
+ GH_EVENT_PR: ${{ github.event_name == 'pull_request' && github.event.number || 0 }}
+ GH_EVENT_HASH: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
+ GH_EVENT_PARENT: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
runs-on: ubuntu-latest
if: github.actor != 'restyled-io[bot]'
@@ -57,36 +60,62 @@ jobs:
.environment/pigweed-venv/*.log
- name: Build example Standalone Echo Client
timeout-minutes: 5
- run:
- scripts/examples/gn_build_example.sh examples/chip-tool
- out/chip_tool_debug
+ run: |
+ scripts/examples/gn_build_example.sh examples/chip-tool out/chip_tool_debug
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ linux debug chip-tool \
+ out/chip_tool_debug/chip-tool \
+ /tmp/bloat_reports/
- name: Build example Standalone Shell
timeout-minutes: 5
- run:
- scripts/examples/gn_build_example.sh examples/shell/standalone
- out/shell_debug
+ run: |
+ scripts/examples/gn_build_example.sh examples/shell/standalone out/shell_debug
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ linux debug shell \
+ out/shell_debug/chip-shell \
+ /tmp/bloat_reports/
- name: Build example Standalone All Clusters Server
timeout-minutes: 5
- run:
- scripts/examples/gn_build_example.sh examples/all-clusters-app/linux
- out/all_clusters_debug chip_bypass_rendezvous=true
+ run: |
+ scripts/examples/gn_build_example.sh examples/all-clusters-app/linux out/all_clusters_debug \
+ chip_bypass_rendezvous=true
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ linux debug all-clusters-app \
+ out/all_clusters_debug/chip-all-clusters-app \
+ /tmp/bloat_reports/
- name: Build example TV app
timeout-minutes: 5
- run:
+ run: |
scripts/examples/gn_build_example.sh examples/tv-app/linux out/tv_app_debug
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ linux debug tv-app \
+ out/tv_app_debug/chip-tv-app \
+ /tmp/bloat_reports/
- name: Build example lighting app with RPCs
timeout-minutes: 5
- run:
- scripts/examples/gn_build_example.sh examples/lighting-app/linux
- out/lighting_app_debug_rpc 'import("//with_pw_rpc.gni")'
+ run: |
+ scripts/examples/gn_build_example.sh examples/lighting-app/linux out/lighting_app_debug_rpc \
+ 'import("//with_pw_rpc.gni")'
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ linux debug+rpc lighting-app \
+ out/lighting_app_debug_rpc/chip-lighting-app \
+ /tmp/bloat_reports/
- name: Build example Standalone Bridge
timeout-minutes: 5
- run:
+ run: |
scripts/examples/gn_build_example.sh examples/bridge-app/linux out/bridge_debug
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ linux debug+rpc bridge-app \
+ out/bridge_debug/chip-bridge-app \
+ /tmp/bloat_reports/
- name: Build example OTA Provider
timeout-minutes: 5
- run:
+ run: |
scripts/examples/gn_build_example.sh examples/ota-provider-app/linux out/ota_provider_debug
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ linux debug ota-provider-app \
+ out/ota_provider_debug/chip-ota-provider-app \
+ /tmp/bloat_reports/
- name: Binary artifact suffix
id: outsuffix
uses: haya14busa/action-cond@v1.0.0
@@ -103,3 +132,9 @@ jobs:
path: |
out/all_clusters_debug/all-clusters-server
out/all_clusters_debug/all-clusters-server.map
+ - name: Uploading Size Reports
+ uses: actions/upload-artifact@v2
+ with:
+ name: Size,Linux-Examples,${{ env.GH_EVENT_PR }},${{ env.GH_EVENT_HASH }},${{ env.GH_EVENT_PARENT }}
+ path: |
+ /tmp/bloat_reports/
diff --git a/.github/workflows/examples-mbed.yaml b/.github/workflows/examples-mbed.yaml
index 6cec70a4589264..6df9897a110d99 100644
--- a/.github/workflows/examples-mbed.yaml
+++ b/.github/workflows/examples-mbed.yaml
@@ -32,6 +32,9 @@ jobs:
BUILD_TYPE: mbedos
APP_PROFILE: release
APP_TARGET: CY8CPROTO_062_4343W
+ GH_EVENT_PR: ${{ github.event_name == 'pull_request' && github.event.number || 0 }}
+ GH_EVENT_HASH: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
+ GH_EVENT_PARENT: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
runs-on: ubuntu-latest
if: github.actor != 'restyled-io[bot]'
@@ -39,6 +42,7 @@ jobs:
container:
image: connectedhomeip/chip-build-mbed-os:latest
volumes:
+ - "/tmp/bloat_reports:/tmp/bloat_reports"
- "/tmp/output_binaries:/tmp/output_binaries"
steps:
@@ -62,11 +66,21 @@ jobs:
- name: Build lock-app example
timeout-minutes: 10
- run: scripts/examples/mbed_example.sh -a=lock-app -b=$APP_TARGET -p=$APP_PROFILE
+ run: |
+ scripts/examples/mbed_example.sh -a=lock-app -b=$APP_TARGET -p=$APP_PROFILE
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ mbed $APP_TARGET+$APP_PROFILE lock-app \
+ examples/lock-app/mbed/build-CY8CPROTO_062_4343W/release/chip-mbed-lock-app-example \
+ /tmp/bloat_reports/
- name: Build lighting-app example
timeout-minutes: 10
- run: scripts/examples/mbed_example.sh -a=lighting-app -b=$APP_TARGET -p=$APP_PROFILE
+ run: |
+ scripts/examples/mbed_example.sh -a=lighting-app -b=$APP_TARGET -p=$APP_PROFILE
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ mbed $APP_TARGET+$APP_PROFILE lighting-app \
+ examples/lighting-app/mbed/build-CY8CPROTO_062_4343W/release/chip-mbed-lighting-app-example \
+ /tmp/bloat_reports/
- name: Copy aside build products
run: |
@@ -91,3 +105,10 @@ jobs:
${{ env.BUILD_TYPE }}-binaries-${{env.APP_TARGET}}-${{ env.APP_PROFILE}}-build-${{
steps.outsuffix.outputs.value }}
path: /tmp/output_binaries/${{ env.BUILD_TYPE }}-build
+
+ - name: Uploading Size Reports
+ uses: actions/upload-artifact@v2
+ with:
+ name: Size,Mbed-Examples,${{ env.GH_EVENT_PR }},${{ env.GH_EVENT_HASH }},${{ env.GH_EVENT_PARENT }}
+ path: |
+ /tmp/bloat_reports/
diff --git a/.github/workflows/examples-nrfconnect.yaml b/.github/workflows/examples-nrfconnect.yaml
index 21cbfaeefcd8fb..16bb09a5ee7e9b 100644
--- a/.github/workflows/examples-nrfconnect.yaml
+++ b/.github/workflows/examples-nrfconnect.yaml
@@ -29,6 +29,9 @@ jobs:
env:
BUILD_TYPE: nrfconnect
+ GH_EVENT_PR: ${{ github.event_name == 'pull_request' && github.event.number || 0 }}
+ GH_EVENT_HASH: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
+ GH_EVENT_PARENT: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
runs-on: ubuntu-latest
if: github.actor != 'restyled-io[bot]'
@@ -60,37 +63,87 @@ jobs:
run: scripts/run_in_build_env.sh "python3 scripts/setup/nrfconnect/update_ncs.py --update --shallow"
- name: Build example nRF Connect SDK Lock App on nRF52840 DK
timeout-minutes: 10
- run: scripts/examples/nrfconnect_example.sh lock-app nrf52840dk_nrf52840
+ run: |
+ scripts/examples/nrfconnect_example.sh lock-app nrf52840dk_nrf52840
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ nrfconnect nrf52840dk_nrf52840 lock-app \
+ examples/lock-app/nrfconnect/build/nrf52840dk_nrf52840/zephyr/zephyr.elf \
+ /tmp/bloat_reports/
- name: Build example nRF Connect SDK Lighting App on nRF52840 DK
timeout-minutes: 10
- run: scripts/examples/nrfconnect_example.sh lighting-app nrf52840dk_nrf52840
+ run: |
+ scripts/examples/nrfconnect_example.sh lighting-app nrf52840dk_nrf52840
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ nrfconnect nrf52840dk_nrf52840 lighting-app \
+ examples/lighting-app/nrfconnect/build/nrf52840dk_nrf52840/zephyr/zephyr.elf \
+ /tmp/bloat_reports/
- name: Build example nRF Connect SDK Lighting App on nRF52840 DK with RPC
timeout-minutes: 10
- run: scripts/examples/nrfconnect_example.sh lighting-app nrf52840dk_nrf52840 -DOVERLAY_CONFIG=rpc.overlay
+ run: |
+ scripts/examples/nrfconnect_example.sh lighting-app nrf52840dk_nrf52840 -DOVERLAY_CONFIG=rpc.overlay
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ nrfconnect nrf52840dk_nrf52840+rpc lighting-app \
+ examples/lighting-app/nrfconnect/build/nrf52840dk_nrf52840/zephyr/zephyr.elf \
+ /tmp/bloat_reports/
- name: Build example nRF Connect SDK Shell on nRF52840 DK
timeout-minutes: 10
- run: scripts/examples/nrfconnect_example.sh shell nrf52840dk_nrf52840
+ run: |
+ scripts/examples/nrfconnect_example.sh shell nrf52840dk_nrf52840
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ nrfconnect nrf52840dk_nrf52840 shell \
+ examples/shell/nrfconnect/build/nrf52840dk_nrf52840/zephyr/zephyr.elf \
+ /tmp/bloat_reports/
- name: Build example nRF Connect SDK Pigweed on nRF52840 DK
timeout-minutes: 10
- run: scripts/examples/nrfconnect_example.sh pigweed-app nrf52840dk_nrf52840
+ run: |
+ scripts/examples/nrfconnect_example.sh pigweed-app nrf52840dk_nrf52840
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ nrfconnect nrf52840dk_nrf52840 pigweed-app \
+ examples/pigweed-app/nrfconnect/build/nrf52840dk_nrf52840/zephyr/zephyr.elf \
+ /tmp/bloat_reports/
- name: Build example nRF Connect SDK Lock App on nRF5340 DK
timeout-minutes: 10
- run: scripts/examples/nrfconnect_example.sh lock-app nrf5340dk_nrf5340_cpuapp
+ run: |
+ scripts/examples/nrfconnect_example.sh lock-app nrf5340dk_nrf5340_cpuapp
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ nrfconnect nrf5340dk_nrf5340_cpuapp lock-app \
+ examples/lock-app/nrfconnect/build/nrf5340dk_nrf5340_cpuapp/zephyr/zephyr.elf \
+ /tmp/bloat_reports/
- name: Build example nRF Connect SDK Lighting App on nRF5340 DK
timeout-minutes: 10
- run: scripts/examples/nrfconnect_example.sh lighting-app nrf5340dk_nrf5340_cpuapp
+ run: |
+ scripts/examples/nrfconnect_example.sh lighting-app nrf5340dk_nrf5340_cpuapp
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ nrfconnect nrf5340dk_nrf5340_cpuapp lighting-app \
+ examples/lighting-app/nrfconnect/build/nrf5340dk_nrf5340_cpuapp/zephyr/zephyr.elf \
+ /tmp/bloat_reports/
- name: Build example nRF Connect SDK Shell on nRF5340 DK
timeout-minutes: 10
- run: scripts/examples/nrfconnect_example.sh shell nrf5340dk_nrf5340_cpuapp
+ run: |
+ scripts/examples/nrfconnect_example.sh shell nrf5340dk_nrf5340_cpuapp
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ nrfconnect nrf5340dk_nrf5340_cpuapp shell \
+ examples/shell/nrfconnect/build/nrf5340dk_nrf5340_cpuapp/zephyr/zephyr.elf \
+ /tmp/bloat_reports/
- name: Build example nRF Connect SDK Pump App on nRF52840 DK
- timeout-minutes: 5
- run: scripts/examples/nrfconnect_example.sh pump-app nrf52840dk_nrf52840
+ timeout-minutes: 10
+ run: |
+ scripts/examples/nrfconnect_example.sh pump-app nrf52840dk_nrf52840
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ nrfconnect nrf52840dk_nrf52840 pump-app \
+ examples/pump-app/nrfconnect/build/nrf52840dk_nrf52840/zephyr/zephyr.elf \
+ /tmp/bloat_reports/
- name: Build example nRF Connect SDK Pump Controller App on nRF52840 DK
- timeout-minutes: 5
- run: scripts/examples/nrfconnect_example.sh pump-controller-app nrf52840dk_nrf52840
+ timeout-minutes: 10
+ run: |
+ scripts/examples/nrfconnect_example.sh pump-controller-app nrf52840dk_nrf52840
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ nrfconnect nrf52840dk_nrf52840 pump-controller-app \
+ examples/pump-controller-app/nrfconnect/build/nrf52840dk_nrf52840/zephyr/zephyr.elf \
+ /tmp/bloat_reports/
- name: Run unit tests for Zephyr native_posix_64 platform
timeout-minutes: 10
- run:
+ run: |
scripts/run_in_build_env.sh "scripts/tests/nrfconnect_native_posix_tests.sh native_posix_64"
- name: Copy aside build products
run: |
@@ -113,3 +166,9 @@ jobs:
${{ env.BUILD_TYPE }}-example-build-${{
steps.outsuffix.outputs.value }}
path: /tmp/output_binaries/${{ env.BUILD_TYPE }}-build
+ - name: Uploading Size Reports
+ uses: actions/upload-artifact@v2
+ with:
+ name: Size,nRFConnect-Examples,${{ env.GH_EVENT_PR }},${{ env.GH_EVENT_HASH }},${{ env.GH_EVENT_PARENT }}
+ path: |
+ /tmp/bloat_reports/
diff --git a/.github/workflows/examples-qpg.yaml b/.github/workflows/examples-qpg.yaml
index 50518558390431..94e71a2df49d15 100644
--- a/.github/workflows/examples-qpg.yaml
+++ b/.github/workflows/examples-qpg.yaml
@@ -29,6 +29,9 @@ jobs:
env:
BUILD_TYPE: gn_qpg
+ GH_EVENT_PR: ${{ github.event_name == 'pull_request' && github.event.number || 0 }}
+ GH_EVENT_HASH: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
+ GH_EVENT_PARENT: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
runs-on: ubuntu-latest
if: github.actor != 'restyled-io[bot]'
@@ -56,16 +59,29 @@ jobs:
.environment/pigweed-venv/*.log
- name: Build example QPG6100 Lock App
timeout-minutes: 5
- run: scripts/examples/gn_build_example.sh
- examples/lock-app/qpg out/lock_app_debug qpg_target_ic=\"qpg6100\"
+ run: |
+ scripts/examples/gn_build_example.sh examples/lock-app/qpg out/lock_app_debug qpg_target_ic=\"qpg6100\"
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ qpg qpg6100+debug lock-app \
+ out/lock_app_debug/chip-qpg6100-lock-example.out \
+ /tmp/bloat_reports/
- name: Build example QPG6100 Lighting App
timeout-minutes: 5
- run: scripts/examples/gn_build_example.sh
- examples/lighting-app/qpg out/lighting_app_debug qpg_target_ic=\"qpg6100\"
+ run: |
+ scripts/examples/gn_build_example.sh examples/lighting-app/qpg out/lighting_app_debug qpg_target_ic=\"qpg6100\"
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ qpg qpg6100+debug lighting-app \
+ out/lighting_app_debug/chip-qpg6100-lighting-example.out \
+ /tmp/bloat_reports/
- name: Build example QPG6100 persistent-storage
timeout-minutes: 5
- run: scripts/examples/gn_build_example.sh
- examples/persistent-storage/qpg out/persistent-storage_app_debug qpg_target_ic=\"qpg6100\"
+ run: |
+ scripts/examples/gn_build_example.sh examples/persistent-storage/qpg out/persistent-storage_app_debug \
+ qpg_target_ic=\"qpg6100\"
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ qpg qpg6100+debug persistent-storage-app \
+ out/persistent-storage_app_debug/chip-qpg6100-persistent_storage-example.out \
+ /tmp/bloat_reports/
- name: Binary artifact suffix
id: outsuffix
uses: haya14busa/action-cond@v1.0.0
@@ -82,3 +98,9 @@ jobs:
path: |
out/lighting_app_debug/chip-qpg6100-lighting-example.out
out/lighting_app_debug/chip-qpg6100-lighting-example.out.map
+ - name: Uploading Size Reports
+ uses: actions/upload-artifact@v2
+ with:
+ name: Size,QPG-Examples,${{ env.GH_EVENT_PR }},${{ env.GH_EVENT_HASH }},${{ env.GH_EVENT_PARENT }}
+ path: |
+ /tmp/bloat_reports/
diff --git a/.github/workflows/examples-telink.yaml b/.github/workflows/examples-telink.yaml
index dd971792abc77d..b279f8f235c8cd 100644
--- a/.github/workflows/examples-telink.yaml
+++ b/.github/workflows/examples-telink.yaml
@@ -27,6 +27,9 @@ jobs:
name: Telink
env:
BUILD_TYPE: telink
+ GH_EVENT_PR: ${{ github.event_name == 'pull_request' && github.event.number || 0 }}
+ GH_EVENT_HASH: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
+ GH_EVENT_PARENT: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
runs-on: ubuntu-latest
if: github.actor != 'restyled-io[bot]'
@@ -41,7 +44,17 @@ jobs:
uses: actions/checkout@v2
with:
submodules: true
- - name: Build example Telink Lighting App
+ - name: Build example Telink Lighting App
run: |
./scripts/run_in_build_env.sh \
"./scripts/build/build_examples.py --no-log-timestamps --platform telink --app light build"
+ .environment/pigweed-venv/bin/python3 scripts/tools/memory/gh_sizes.py \
+ telink tlsr9518adk80d lighting-app \
+ out/telink-tlsr9518adk80d-light/zephyr/zephyr.elf \
+ /tmp/bloat_reports/
+ - name: Uploading Size Reports
+ uses: actions/upload-artifact@v2
+ with:
+ name: Size,Telink-Examples,${{ env.GH_EVENT_PR }},${{ env.GH_EVENT_HASH }},${{ env.GH_EVENT_PARENT }}
+ path: |
+ /tmp/bloat_reports/
diff --git a/scripts/helpers/bloat_check.py b/scripts/helpers/bloat_check.py
index 52b7d0f46047c5..db1157008d2a9f 100755
--- a/scripts/helpers/bloat_check.py
+++ b/scripts/helpers/bloat_check.py
@@ -268,6 +268,10 @@ def main():
pull_artifact_re = re.compile('^(.*)-pull-(\\d+)$')
binary_count = 0
for a in artifacts:
+ # Ignore size reports; they are handled by a separate script.
+ if a.name.startswith('Size,'):
+ continue
+
# logs cleanup after 3 days
is_log = a.name.endswith('-logs')
diff --git a/scripts/requirements.txt b/scripts/requirements.txt
index edcd7ae09b9348..7905aff72b6862 100644
--- a/scripts/requirements.txt
+++ b/scripts/requirements.txt
@@ -41,6 +41,7 @@ protobuf
# scripts/tools/memory
anytree
cxxfilt
+ghapi
pandas ; platform_machine != 'aarch64'
# scripts/build
diff --git a/scripts/tools/memory/collect.py b/scripts/tools/memory/collect.py
index 03f76e3b672c99..5c0a48e39183d2 100755
--- a/scripts/tools/memory/collect.py
+++ b/scripts/tools/memory/collect.py
@@ -36,6 +36,7 @@ def main(argv):
try:
config = memdf.collect.parse_args({
**memdf.select.CONFIG,
+ **memdf.report.REPORT_DEMANGLE_CONFIG,
**memdf.report.OUTPUT_CONFIG
}, argv)
memdf.report.write_dfs(config, memdf.collect.collect_files(config))
diff --git a/scripts/tools/memory/gh_report.py b/scripts/tools/memory/gh_report.py
new file mode 100755
index 00000000000000..7f395a684b0540
--- /dev/null
+++ b/scripts/tools/memory/gh_report.py
@@ -0,0 +1,548 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2021 Project CHIP Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Generate reports from size artifacts."""
+
+import io
+import itertools
+import json
+import logging
+import os
+import os.path
+import sqlite3
+import sys
+import zipfile
+
+from pathlib import Path
+from typing import Dict, IO, Iterable, Optional, Union
+
+import dateutil.parser # type: ignore
+import fastcore # type: ignore
+import ghapi.all # type: ignore
+import pandas as pd # type: ignore
+
+import memdf.report
+import memdf.util.config
+import memdf.util.sqlite
+from memdf import Config, ConfigDescription
+
+GITHUB_CONFIG: ConfigDescription = {
+ Config.group_def('github'): {
+ 'title': 'github options',
+ },
+ 'github.token': {
+ 'help': 'Github API token, or "SKIP" to suppress connecting to github',
+ 'metavar': 'TOKEN',
+ 'default': '',
+ 'argparse': {
+ 'alias': ['--github-api-token', '--token'],
+ },
+ },
+ 'github.repository': {
+ 'help': 'Github repository',
+ 'metavar': 'OWNER/REPO',
+ 'default': '',
+ 'argparse': {
+ 'alias': ['--repo'],
+ },
+ },
+ 'github.comment': {
+ 'help': 'Send output as github PR comments',
+ 'default': False,
+ 'argparse': {
+ 'alias': ['--comment'],
+ },
+ },
+ 'github.keep': {
+ 'help': 'Leave PR artifacts after commenting',
+ 'default': False,
+ 'argparse': {
+ 'alias': ['--keep'],
+ },
+ },
+ Config.group_map('report'): {
+ 'group': 'output'
+ },
+ 'report.pr': {
+ 'help': 'Report on pull requests',
+ 'default': False,
+ 'argparse': {
+ 'alias': ['--pr', '--pull-request'],
+ },
+ },
+ 'report.push': {
+ 'help': 'Report on pushes',
+ 'default': False,
+ 'argparse': {
+ 'alias': ['--push']
+ },
+ },
+ 'report.query': {
+ 'help': 'Run an SQL query',
+ 'default': [],
+ 'argparse': {
+ 'alias': ['--query', '--sql']
+ },
+ },
+ 'report.increases': {
+ 'help': 'Highlight large increases',
+ 'metavar': 'PERCENT',
+ 'default': 0.0,
+ 'argparse': {
+ 'alias': ['--threshold'],
+ 'type': float,
+ },
+ },
+}
+
+
+class SizeDatabase(memdf.util.sqlite.Database):
+ """A database for recording and comparing size reports."""
+ on_open = ["PRAGMA foreign_keys = ON", "PRAGMA encoding = 'UTF-8'"]
+ on_writable = [
+ """
+ -- A ‘thing’ identifies the kind of built object.
+ -- Builds of the same thing are comparable.
+ CREATE TABLE IF NOT EXISTS thing (
+ id INTEGER PRIMARY KEY,
+ platform TEXT NOT NULL, -- Build platform
+ config TEXT NOT NULL, -- Build configuration discriminator
+ target TEXT NOT NULL, -- Build target
+ UNIQUE(platform, config, target)
+ )
+ """, """
+ -- A ‘build’ identifies a built instance of a thing at some point.
+ CREATE TABLE IF NOT EXISTS build (
+ id INTEGER PRIMARY KEY,
+ thing_id INTEGER REFERENCES thing(id),
+ hash TEXT NOT NULL, -- Commit hash
+ parent TEXT NOT NULL, -- Parent commit hash
+ pr INTEGER DEFAULT 0, -- Github PR number
+ time INTEGER NOT NULL, -- Unix-epoch timestamp
+ artifact INTEGER DEFAULT 0, -- Github artifact ID
+ commented INTEGER DEFAULT 0,
+ UNIQUE(thing_id, hash, parent, pr, time, artifact)
+ )
+ """, """
+ -- A ‘size’ entry gives the size of a section for a particular build.
+ CREATE TABLE IF NOT EXISTS size (
+ build_id INTEGER REFERENCES build(id),
+ name TEXT NOT NULL, -- Section name
+ size INTEGER NOT NULL, -- Section size in bytes
+ PRIMARY KEY (build_id, name)
+ )
+ """
+ ]
+
+ def __init__(self, config: Config):
+ super().__init__(config['database.file'])
+ self.config = config
+ self.gh = gh_open(config)
+ self.deleted_artifacts: set[int] = set()
+
+ def add_sizes(self, **kwargs):
+ """
+ Add a size report to the database.
+
+ The incoming arguments must contain the non-ID column names from
+ ‘thing’ and ‘build’ tables, plus a 'sizes' entry that is a sequence
+ of mappings containing 'name' and 'size'.
+ """
+ td = {k: kwargs[k] for k in ('platform', 'config', 'target')}
+ thing = self.store_and_return_id('thing', **td)
+ bd = {k: kwargs[k] for k in ('hash', 'parent', 'time')}
+ cd = {k: kwargs.get(k, 0) for k in ('pr', 'artifact', 'commented')}
+ build = self.store_and_return_id('build', thing_id=thing, **bd, **cd)
+ for d in kwargs['sizes']:
+ self.store('size', build_id=build, **d)
+
+ def add_sizes_from_json(self, s: Union[bytes, str], origin: Dict):
+ """Add sizes from a JSON size report."""
+ r = origin.copy()
+ r.update(json.loads(s))
+ by = r.get('by', 'section')
+ r['sizes'] = [{
+ 'name': s[by],
+ 'size': s['size']
+ } for s in r['frames'][by]]
+ self.add_sizes(**r)
+
+ def add_sizes_from_zipfile(self, f: Union[IO, Path], origin: Dict):
+ """Add size reports from a zip."""
+ with zipfile.ZipFile(f, 'r') as zip_file:
+ for i in zip_file.namelist():
+ if i.endswith('-sizes.json'):
+ origin['member'] = i
+ with zip_file.open(i) as member:
+ self.add_sizes_from_json(member.read(), origin)
+
+ def add_sizes_from_file(self, filename: str):
+ """Add size reports from a file."""
+ origin = {'file': filename}
+ path = Path(filename)
+ if path.suffix == '.json':
+ logging.info('Reading JSON %s', path)
+ with open(path) as f:
+ self.add_sizes_from_json(f.read(), origin)
+ elif path.suffix == '.zip':
+ logging.info('Reading ZIP %s', path)
+ self.add_sizes_from_zipfile(path, origin)
+ else:
+ logging.warning('Unknown file type "%s" ignored', filename)
+
+ def add_sizes_from_github(self):
+ """Read size report artifacts from github."""
+ if not self.gh:
+ return
+
+ # Size artifacts have names of the form
+ # Size,{group},{pr},{commit_hash},{parent_hash}
+ # Record them keyed by group and commit_hash to match them up
+ # after we have the entire list.
+ size_artifacts: Dict[str, Dict[str, fastcore.basics.AttrDict]] = {}
+ for i in ghapi.all.paged(self.gh.actions.list_artifacts_for_repo):
+ if not i.artifacts:
+ break
+ for a in i.artifacts:
+ if a.name.startswith('Size,'):
+ _, group, pr, commit, parent, *_ = (a.name + ',').split(
+ ',', 5)
+ a.parent = parent
+ a.pr = pr
+ a.created_at = dateutil.parser.isoparse(a.created_at)
+ if group not in size_artifacts:
+ size_artifacts[group] = {}
+ size_artifacts[group][commit] = a
+
+ # Determine required size artifacts.
+ required_artifact_ids: set[int] = set()
+ for group, group_reports in size_artifacts.items():
+ logging.info('Group %s', group)
+ for report in group_reports.values():
+ if self.config['report.pr' if report.pr else 'report.push']:
+ if report.parent not in group_reports:
+ logging.info(' No match for %s', report.name)
+ continue
+ # We have size information for both this report and its
+ # parent, so ensure that both artifacts are downloaded.
+ parent = group_reports[report.parent]
+ required_artifact_ids.add(report.id)
+ required_artifact_ids.add(parent.id)
+ logging.info(' Match %s', report.parent)
+ logging.info(' %s %s', report.id, report.name)
+ logging.info(' %s %s', parent.id, parent.name)
+
+ # Download and add required artifacts.
+ for i in required_artifact_ids:
+ logging.debug('Download artifact %d', i)
+ try:
+ blob = self.gh.actions.download_artifact(i, 'zip')
+ except Exception as e:
+ logging.error('Failed to download artifact %d: %s', i, e)
+ continue
+ self.add_sizes_from_zipfile(io.BytesIO(blob), {'artifact': i})
+
+ def read_inputs(self):
+ """Read size report from github and/or local files."""
+ self.add_sizes_from_github()
+ for filename in self.config['args.inputs']:
+ self.add_sizes_from_file(filename)
+ self.commit()
+
+ def select_matching_commits(self):
+ """Find matching builds, where one's commit is the other's parent."""
+ return self.execute('''
+ SELECT DISTINCT c.pr AS pr, c.hash AS hash, p.hash AS parent
+ FROM build c
+ INNER JOIN build p ON p.hash = c.parent
+ WHERE c.commented = 0
+ ORDER BY c.pr, c.hash, p.hash ASC
+ ''')
+
+ def set_commented(self, build_ids: Iterable[int]):
+ """Set the commented flag for the given builds."""
+ if not build_ids:
+ return
+ for build_id in build_ids:
+ self.execute('UPDATE build SET commented = 1 WHERE id = ?',
+ (build_id, ))
+ self.commit()
+
+ def delete_stale_builds(self, build_ids: Iterable[int]):
+ """Delete stale builds."""
+ if not build_ids:
+ return
+ for build_id in build_ids:
+ logging.info('Deleting obsolete build %d', build_id)
+ self.execute('DELETE FROM size WHERE build_id = ?', (build_id, ))
+ self.execute('DELETE FROM build WHERE id = ?', (build_id, ))
+ self.commit()
+
+ def delete_artifact(self, artifact_id: int):
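+ """Delete a github artifact, avoiding duplicate deletion attempts."""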
+ if self.gh and artifact_id not in self.deleted_artifacts:
+ self.deleted_artifacts.add(artifact_id)
+ self.gh.actions.delete_artifact(artifact_id)
+
+ def delete_stale_artifacts(self, stale_artifacts: Iterable[int]):
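+ """Delete obsolete github artifacts, unless configured to keep them."""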
+ if not self.config['github.keep']:
+ for artifact_id in stale_artifacts:
+ logging.info('Deleting obsolete artifact %d', artifact_id)
+ self.delete_artifact(artifact_id)
+
+
+def gh_open(config: Config) -> Optional[ghapi.core.GhApi]:
+ """Return a GhApi, if so configured."""
+ gh: Optional[ghapi.core.GhApi] = None
+ if config['github.repository']:
+ owner, repo = config.get('github.repository').split('/', 1)
+ config.put('github.owner', owner)
+ config.put('github.repo', repo)
+ if not config['github.token']:
+ config['github.token'] = os.environ.get('GITHUB_TOKEN')
+ if not config['github.token']:
+ logging.error('Missing --github-token')
+ return None
+ token = config['github.token']
+ if token != 'SKIP':
+ gh = ghapi.all.GhApi(owner=owner,
+ repo=repo,
+ token=config['github.token'])
+ return gh
+
+
+def gh_get_comments_for_pr(gh: ghapi.core.GhApi, pr: int):
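+ """Iterate over all comments on the given pull request, following pagination."""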
+ return itertools.chain.from_iterable(
+ ghapi.all.paged(gh.issues.list_comments, pr))
+
+
+def percent_change(a: int, b: int) -> float:
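+ """Return the percentage change from a to b."""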
+ if a == 0:
+ return 0.0 if b == 0 else float('inf')
+ return 100. * (b - a) / a
+
+
+def changes_for_commit(db: SizeDatabase, pr: int, commit: str,
+ parent: str) -> pd.DataFrame:
+ """Return a DataFrame with size changes between the given commits."""
+ cur = db.execute(
+ '''
+ SELECT DISTINCT
+ t.id AS thing,
+ cb.artifact AS artifact,
+ pb.id AS parent_build,
+ cb.id AS commit_build,
+ t.platform, t.config, t.target,
+ cs.name,
+ ps.size AS parent_size,
+ cs.size AS commit_size,
+ cs.size - ps.size AS change
+ FROM thing t
+ INNER JOIN build cb ON cb.thing_id = t.id
+ INNER JOIN build pb ON pb.thing_id = t.id AND pb.hash = cb.parent
+ INNER JOIN size cs ON cs.build_id = cb.id
+ INNER JOIN size ps ON ps.build_id = pb.id AND cs.name = ps.name
+ WHERE cb.hash = ? AND pb.hash = ?
+ ORDER BY t.platform, t.config, t.target,
+ cs.name, cb.time DESC, pb.time DESC
+ ''', (commit, parent))
+
+ keep = ('platform', 'target', 'config', 'name', 'parent_size',
+ 'commit_size', 'change')
+ things: set[int] = set()
+ artifacts: set[int] = set()
+ builds: set[int] = set()
+ stale_builds: set[int] = set()
+ stale_artifacts: set[int] = set()
+ previous: Optional[sqlite3.Row] = None
+ rows = []
+
+ for row in cur.fetchall():
+ row = sqlite3.Row(cur, row)
+ things.add(row['thing'])
+ if (previous is not None and row['thing'] == previous['thing']
+ and row['name'] == previous['name']):
+ # This is a duplicate build, older because we sort descending,
+ # presumably from a partial workflow re-run.
+ if row['parent_build'] != previous['parent_build']:
+ stale_builds.add(row['parent_build'])
+ if row['commit_build'] != previous['commit_build']:
+ stale_builds.add(row['commit_build'])
+ stale_artifacts.add(row['artifact'])
+ else:
+ previous = row
+ new = [row[k] for k in keep]
+ new.append(percent_change(row['parent_size'], row['commit_size']))
+ rows.append(new)
+ artifacts.add(row['artifact'])
+ builds.add(row['commit_build'])
+
+ db.delete_stale_builds(stale_builds)
+ db.delete_stale_artifacts(stale_artifacts)
+
+ df = pd.DataFrame(rows,
+ columns=('platform', 'target', 'config', 'section',
+ parent[:8], commit[:8], 'change', '% change'))
+ df.attrs = {
+ 'name': f'{pr},{commit},{parent}',
+ 'title': (f'PR #{pr}: ' if pr else '') +
+ f'Size comparison from {parent} to {commit}',
+ 'things': things,
+ 'builds': builds,
+ 'artifacts': artifacts,
+ 'pr': pr,
+ 'commit': commit,
+ 'parent': parent,
+ }
+ return df
+
+
+def gh_send_change_report(db: SizeDatabase, df: pd.DataFrame,
+ tdf: pd.DataFrame) -> bool:
+ """Send a change report as a github comment."""
+ if not db.gh:
+ return False
+ pr = df.attrs['pr']
+ title = df.attrs['title']
+ existing_comment_id = 0
+ for comment in gh_get_comments_for_pr(db.gh, pr):
+ if comment.body.partition('\n')[0] == df.attrs['title']:
+ existing_comment_id = comment.id
+ title = comment.body
+ break
+
+ md = io.StringIO()
+ md.write(title)
+ md.write('\n')
+
+ if tdf is not None and not tdf.empty:
+ md.write(f'\n**{tdf.attrs["title"]}:**\n\n')
+ memdf.report.write_df(db.config,
+ tdf,
+ md,
+ 'pipe',
+ hierify=True,
+ title=False,
+ tabulate={'floatfmt': '5.1f'})
+
+ count = len(df.attrs['things'])
+ summary = f'{count} build{"" if count == 1 else "s"}'
+ md.write(f'\n<details>\n<summary>{summary}</summary>\n\n')
+ memdf.report.write_df(db.config,
+ df,
+ md,
+ 'pipe',
+ hierify=True,
+ title=False,
+ tabulate={'floatfmt': '5.1f'})
+ md.write('\n</details>\n')