diff --git a/.github/ISSUE_TEMPLATE/Issue-report.yml b/.github/ISSUE_TEMPLATE/Issue-report.yml
index d5b756085c7..436c8a57b7e 100644
--- a/.github/ISSUE_TEMPLATE/Issue-report.yml
+++ b/.github/ISSUE_TEMPLATE/Issue-report.yml
@@ -5,7 +5,7 @@ body:
   - type: markdown
     attributes:
       value: |
-        * Before reporting a new issue please check and search in [List of existing issues](https://github.com/espressif/arduino-esp32/issues?q=is%3Aissue) 
+        * Before reporting a new issue please check and search in [List of existing issues](https://github.com/espressif/arduino-esp32/issues?q=is%3Aissue)
         * Please check [Online Documentation](https://docs.espressif.com/projects/arduino-esp32/en/latest/index.html)
         * Take a look on [Troubleshooting guide](https://docs.espressif.com/projects/arduino-esp32/en/latest/troubleshooting.html)
         * If still experiencing the issue, please provide as many details as possible below about your hardware, computer setup and code.
@@ -24,7 +24,7 @@ body:
       description: What development board or other hardware is the chip attached to?
       placeholder: ex. DevKitC, plain module on breadboard, etc. If your hardware is custom or unusual, please attach a photo.
     validations:
-      required: true 
+      required: true
   - type: textarea
     id: other-hw
     attributes:
@@ -60,7 +60,7 @@ body:
         - v2.0.8
         - v2.0.7
         - v2.0.6
-        - v2.0.5 
+        - v2.0.5
         - v2.0.4
         - v2.0.3
         - v2.0.2
@@ -75,9 +75,9 @@ body:
     attributes:
       label: IDE Name
       description: What IDE are you using?
-      placeholder: eg. Arduino IDE, PlatformIO, Sloeber...
+      placeholder: eg. Arduino IDE, VSCode, Sloeber...
     validations:
-      required: true 
+      required: true
   - type: input
     id: os
     attributes:
@@ -95,13 +95,13 @@ body:
     validations:
       required: true
   - type: dropdown
-    id: PSRAM 
+    id: PSRAM
     attributes:
       label: PSRAM enabled
       description: Is PSRAM enabled?
       options:
-        - 'yes'
-        - 'no'
+        - "yes"
+        - "no"
     validations:
       required: true
   - type: input
@@ -116,8 +116,8 @@ body:
     id: Description
     attributes:
       label: Description
-      description: Please describe your problem here and expected behaviour
-      placeholder: ex. Can't connect/weird behaviour/wrong function/missing parameter..
+      description: Please describe your problem here and expected behavior
+      placeholder: ex. Can't connect/weird behavior/wrong function/missing parameter..
     validations:
       required: true
   - type: textarea
@@ -128,7 +128,7 @@ body:
       placeholder: ex. Related part of the code to replicate the issue
       render: cpp
     validations:
-      required: true 
+      required: true
   - type: textarea
     id: Debug
     attributes:
@@ -137,11 +137,11 @@ body:
       placeholder: Enable Core debug level - Debug on tools menu of Arduino IDE, then put the serial output here.
      render: plain
     validations:
-      required: true 
+      required: true
   - type: textarea
     id: other-remarks
     attributes:
-      label: Other Steps to Reproduce 
+      label: Other Steps to Reproduce
       description: Is there any other information you can think of which will help us reproduce this problem? Any additional info can be added as well.
       placeholder: ex. I also tried on other OS, HW...it works correctly on that setup.
   - type: checkboxes
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index 2a9b0ef82e0..03b3a76df1e 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -5,4 +5,4 @@ contact_links:
     about: Community channel for questions and help
   - name: ESP32 Forum - Arduino
     url: https://esp32.com/viewforum.php?f=19
-    about: Official Forum for questions
\ No newline at end of file
+    about: Official Forum for questions
diff --git a/.github/scripts/check-cmakelists.sh b/.github/scripts/check-cmakelists.sh
index 98d9722ad83..7d4f6b4e2d9 100755
--- a/.github/scripts/check-cmakelists.sh
+++ b/.github/scripts/check-cmakelists.sh
@@ -1,4 +1,5 @@
 #!/bin/bash
+
 #
 # This script is used in the CI workflow. It checks all non-examples source files in libraries/ and cores/ are listed in
 # CMakeLists.txt for the cmake-based IDF component
@@ -12,10 +13,10 @@ set -e
 git submodule update --init --recursive

 # find all source files in repo
-REPO_SRCS=`find cores/esp32/ libraries/ -name 'examples' -prune -o -name '*.c' -print -o -name '*.cpp' -print | sort`
+REPO_SRCS=$(find cores/esp32/ libraries/ -name 'examples' -prune -o -name '*.c' -print -o -name '*.cpp' -print | sort)

 # find all source files named in CMakeLists.txt COMPONENT_SRCS
-CMAKE_SRCS=`cmake --trace-expand -P CMakeLists.txt 2>&1 | grep set\(srcs | cut -d'(' -f3 | sed 's/ )//' | sed 's/srcs //' | tr ' ;' '\n' | sort`
+CMAKE_SRCS=$(cmake --trace-expand -P CMakeLists.txt 2>&1 | grep set\(srcs | cut -d'(' -f3 | sed 's/ )//' | sed 's/srcs //' | tr ' ;' '\n' | sort)

 if ! diff -u0 --label "Repo Files" --label "srcs" <(echo "$REPO_SRCS") <(echo "$CMAKE_SRCS"); then
     echo "Source files in repo (-) and source files in CMakeLists.txt (+) don't match"
diff --git a/.github/scripts/find_all_boards.sh b/.github/scripts/find_all_boards.sh
index b474a49bc2e..67b46661ca5 100755
--- a/.github/scripts/find_all_boards.sh
+++ b/.github/scripts/find_all_boards.sh
@@ -3,7 +3,9 @@
 # Get all boards
 boards_array=()

-for line in `grep '.tarch=' boards.txt`; do
+boards_list=$(grep '.tarch=' boards.txt)
+
+while read -r line; do
     board_name=$(echo "$line" | cut -d '.' -f1 | cut -d '#' -f1)
     # skip esp32c2 as we dont build libs for it
     if [ "$board_name" == "esp32c2" ]; then
@@ -12,29 +14,26 @@ for line in `grep '.tarch=' boards.txt`; do
     fi
     boards_array+=("espressif:esp32:$board_name")
     echo "Added 'espressif:esp32:$board_name' to array"
-done
+done <<< "$boards_list"

 # Create JSON like string with all boards found and pass it to env variable
 board_count=${#boards_array[@]}
 echo "Boards found: $board_count"
-echo "BOARD-COUNT=$board_count" >> $GITHUB_ENV
+echo "BOARD-COUNT=$board_count" >> "$GITHUB_ENV"

-if [ $board_count -gt 0 ]
-then
+if [ "$board_count" -gt 0 ]; then
     json_matrix='['
-    for board in ${boards_array[@]}
-    do
+    for board in "${boards_array[@]}"; do
         json_matrix+='"'$board'"'
-        if [ $board_count -gt 1 ]
-        then
+        if [ "$board_count" -gt 1 ]; then
             json_matrix+=","
         fi
-        board_count=$(($board_count - 1))
+        board_count=$((board_count - 1))
     done
     json_matrix+=']'
-    echo $json_matrix
-    echo "FQBNS=${json_matrix}" >> $GITHUB_ENV
+    echo "$json_matrix"
+    echo "FQBNS=${json_matrix}" >> "$GITHUB_ENV"
 else
-    echo "FQBNS=" >> $GITHUB_ENV
+    echo "FQBNS=" >> "$GITHUB_ENV"
 fi
diff --git a/.github/scripts/find_new_boards.sh b/.github/scripts/find_new_boards.sh
index 706676b4a4c..4482aa2b1da 100755
--- a/.github/scripts/find_new_boards.sh
+++ b/.github/scripts/find_new_boards.sh
@@ -5,14 +5,13 @@ owner_repository=$1
 base_ref=$2

 # Download the boards.txt file from the base branch
-curl -L -o boards_base.txt https://raw.githubusercontent.com/$owner_repository/$base_ref/boards.txt
+curl -L -o boards_base.txt https://raw.githubusercontent.com/"$owner_repository"/"$base_ref"/boards.txt

 # Compare boards.txt file in the repo with the modified file from PR
 diff=$(diff -u boards_base.txt boards.txt)

 # Check if the diff is empty
-if [ -z "$diff" ]
-then
+if [ -z "$diff" ]; then
     echo "No changes in boards.txt file"
     echo "FQBNS="
     exit 0
@@ -21,7 +20,7 @@ fi
 # Extract added or modified lines (lines starting with '+' or '-')
 modified_lines=$(echo "$diff" | grep -E '^[+-][^+-]')

-# Print the modified lines for debugging 
+# Print the modified lines for debugging
 echo "Modified lines:"
 echo "$modified_lines"

@@ -29,15 +28,12 @@ boards_array=()
 previous_board=""

 # Extract board names from the modified lines, and add them to the boards_array
-while read -r line
-do
+while read -r line; do
     board_name=$(echo "$line" | cut -d '.' -f1 | cut -d '#' -f1)
     # remove + or - from the board name at the beginning
-    board_name=$(echo "$board_name" | sed 's/^[+-]//')
-    if [ "$board_name" != "" ] && [ "$board_name" != "+" ] && [ "$board_name" != "-" ] && [ "$board_name" != "esp32_family" ]
-    then
-        if [ "$board_name" != "$previous_board" ]
-        then
+    board_name=${board_name#[-+]}
+    if [ "$board_name" != "" ] && [ "$board_name" != "+" ] && [ "$board_name" != "-" ] && [ "$board_name" != "esp32_family" ]; then
+        if [ "$board_name" != "$previous_board" ]; then
             boards_array+=("espressif:esp32:$board_name")
             previous_board="$board_name"
             echo "Added 'espressif:esp32:$board_name' to array"
@@ -48,22 +44,19 @@ done <<< "$modified_lines"

 # Create JSON like string with all boards found and pass it to env variable
 board_count=${#boards_array[@]}

-if [ $board_count -gt 0 ]
-then
+if [ "$board_count" -gt 0 ]; then
     json_matrix='{"fqbn": ['
-    for board in ${boards_array[@]}
-    do
+    for board in "${boards_array[@]}"; do
         json_matrix+='"'$board'"'
-        if [ $board_count -gt 1 ]
-        then
+        if [ "$board_count" -gt 1 ]; then
             json_matrix+=","
         fi
-        board_count=$(($board_count - 1))
+        board_count=$((board_count - 1))
     done
     json_matrix+=']}'
-    echo $json_matrix
-    echo "FQBNS=${json_matrix}" >> $GITHUB_ENV
+    echo "$json_matrix"
+    echo "FQBNS=${json_matrix}" >> "$GITHUB_ENV"
 else
-    echo "FQBNS=" >> $GITHUB_ENV
-fi
\ No newline at end of file
+    echo "FQBNS=" >> "$GITHUB_ENV"
+fi
diff --git a/.github/scripts/install-arduino-cli.sh b/.github/scripts/install-arduino-cli.sh
index 533b39c7ddd..bb7f544e752 100755
--- a/.github/scripts/install-arduino-cli.sh
+++ b/.github/scripts/install-arduino-cli.sh
@@ -1,6 +1,6 @@
 #!/bin/bash

-OSBITS=`uname -m`
+OSBITS=$(uname -m)
 if [[ "$OSTYPE" == "linux"* ]]; then
     export OS_IS_LINUX="1"
     if [[ "$OSBITS" == "i686" ]]; then
@@ -49,4 +49,3 @@ if [ ! -d "$ARDUINO_IDE_PATH" ] || [ ! -f "$ARDUINO_IDE_PATH/arduino-cli" ]; the
         curl -fsSL https://raw.githubusercontent.com/arduino/arduino-cli/master/install.sh | BINDIR="$ARDUINO_IDE_PATH" sh
     fi
 fi
-
diff --git a/.github/scripts/install-arduino-core-esp32.sh b/.github/scripts/install-arduino-core-esp32.sh
index fe50c909198..e0071a0eb83 100755
--- a/.github/scripts/install-arduino-core-esp32.sh
+++ b/.github/scripts/install-arduino-core-esp32.sh
@@ -5,7 +5,7 @@ if [ ! -d "$ARDUINO_ESP32_PATH" ]; then
     echo "Installing ESP32 Arduino Core ..."
     script_init_path="$PWD"
     mkdir -p "$ARDUINO_USR_PATH/hardware/espressif"
-    cd "$ARDUINO_USR_PATH/hardware/espressif"
+    cd "$ARDUINO_USR_PATH/hardware/espressif" || exit

     echo "Installing Python Serial ..."
     pip install pyserial > /dev/null
@@ -15,25 +15,25 @@ if [ ! -d "$ARDUINO_ESP32_PATH" ]; then
         pip install requests > /dev/null
     fi

-    if [ ! -z "$GITHUB_REPOSITORY" ]; then
+    if [ -n "$GITHUB_REPOSITORY" ]; then
         echo "Linking Core..."
-        ln -s $GITHUB_WORKSPACE esp32
+        ln -s "$GITHUB_WORKSPACE" esp32
     else
         echo "Cloning Core Repository..."
         git clone https://github.com/espressif/arduino-esp32.git esp32 > /dev/null 2>&1
     fi

     #echo "Updating Submodules ..."
-    cd esp32
+    cd esp32 || exit
     #git submodule update --init --recursive > /dev/null 2>&1

     echo "Installing Platform Tools ..."
if [ "$OS_IS_WINDOWS" == "1" ]; then - cd tools && ./get.exe + cd tools && ./get.exe else - cd tools && python get.py + cd tools && python get.py fi - cd $script_init_path + cd "$script_init_path" || exit echo "ESP32 Arduino has been installed in '$ARDUINO_ESP32_PATH'" echo "" diff --git a/.github/scripts/install-arduino-ide.sh b/.github/scripts/install-arduino-ide.sh index 7fd95797834..5b3bcb1791e 100755 --- a/.github/scripts/install-arduino-ide.sh +++ b/.github/scripts/install-arduino-ide.sh @@ -4,7 +4,7 @@ #OSTYPE: 'msys', ARCH: 'x86_64' => win32 #OSTYPE: 'darwin18', ARCH: 'i386' => macos -OSBITS=`uname -m` +OSBITS=$(uname -m) if [[ "$OSTYPE" == "linux"* ]]; then export OS_IS_LINUX="1" ARCHIVE_FORMAT="tar.xz" @@ -77,4 +77,3 @@ if [ ! -d "$ARDUINO_IDE_PATH" ]; then echo "Arduino IDE Installed in '$ARDUINO_IDE_PATH'" echo "" fi - diff --git a/.github/scripts/install-platformio-esp32.sh b/.github/scripts/install-platformio-esp32.sh index 80c668bdc0e..8cd6552eb80 100755 --- a/.github/scripts/install-platformio-esp32.sh +++ b/.github/scripts/install-platformio-esp32.sh @@ -52,7 +52,7 @@ python -c "$replace_script" if [ "$GITHUB_REPOSITORY" == "espressif/arduino-esp32" ]; then echo "Linking Core..." - ln -s $GITHUB_WORKSPACE "$PLATFORMIO_ESP32_PATH" + ln -s "$GITHUB_WORKSPACE" "$PLATFORMIO_ESP32_PATH" else echo "Cloning Core Repository ..." git clone --recursive https://github.com/espressif/arduino-esp32.git "$PLATFORMIO_ESP32_PATH" > /dev/null 2>&1 @@ -61,7 +61,7 @@ fi echo "PlatformIO for ESP32 has been installed" echo "" -function build_pio_sketch(){ # build_pio_sketch +function build_pio_sketch { # build_pio_sketch if [ "$#" -lt 3 ]; then echo "ERROR: Illegal number of parameters" echo "USAGE: build_pio_sketch " @@ -71,13 +71,15 @@ function build_pio_sketch(){ # build_pio_sketch local board="$1" local options="$2" local sketch="$3" - local sketch_dir=$(dirname "$sketch") + local sketch_dir + + sketch_dir=$(dirname "$sketch") echo "" - echo "Compiling '"$(basename "$sketch")"' ..." + echo "Compiling '$(basename "$sketch")' ..." 
python -m platformio ci --board "$board" "$sketch_dir" --project-option="$options" } -function build_pio_sketches(){ # build_pio_sketches +function build_pio_sketches { # build_pio_sketches if [ "$#" -lt 3 ]; then echo "ERROR: Illegal number of parameters" echo "USAGE: build_pio_sketches [ ]" @@ -108,27 +110,34 @@ function build_pio_sketches(){ # build_pio_sketches ' (x.x.x-rc9223372036854775807) to ensure having REL above any RC -# Dummy approach, functional anyway for current ESP package versioning (unlike NormalizedVersion/LooseVersion/StrictVersion & similar crap) + +# Normalize ESP release version string (x.x.x) by adding '-rc' (x.x.x-rc9223372036854775807) +# to ensure having REL above any RC +# Dummy approach, functional anyway for current ESP package versioning +# (unlike NormalizedVersion/LooseVersion/StrictVersion & similar crap) def pkgVersionNormalized(versionString): verStr = str(versionString) - verParts = re.split('\.|-rc|-alpha', verStr, flags=re.IGNORECASE) - + verParts = re.split(r"\.|-rc|-alpha", verStr, flags=re.IGNORECASE) + if len(verParts) == 3: - if (sys.version_info > (3, 0)): # Python 3 - verStr = str(versionString) + '-rc' + str(sys.maxsize) - else: # Python 2 - verStr = str(versionString) + '-rc' + str(sys.maxint) - + if sys.version_info > (3, 0): # Python 3 + verStr = str(versionString) + "-rc" + str(sys.maxsize) + else: # Python 2 + verStr = str(versionString) + "-rc" + str(sys.maxint) + elif len(verParts) != 4: print("pkgVersionNormalized WARNING: unexpected version format: {0})".format(verStr), file=sys.stderr) - + return verStr @@ -54,31 +62,37 @@ def main(args): return 1 tools = {} - platforms = {} + platforms = {} pkg1 = load_package(args[1]) - tools = merge_objects(tools, pkg1['tools']); - platforms = merge_objects(platforms, pkg1['platforms']); + tools = merge_objects(tools, pkg1["tools"]) + platforms = merge_objects(platforms, pkg1["platforms"]) pkg2 = load_package(args[2]) - tools = merge_objects(tools, pkg2['tools']); - platforms = merge_objects(platforms, pkg2['platforms']); + tools = merge_objects(tools, pkg2["tools"]) + platforms = merge_objects(platforms, pkg2["platforms"]) - pkg1['tools'] = [] - pkg1['platforms'] = [] + pkg1["tools"] = [] + pkg1["platforms"] = [] for name in tools: for version in tools[name]: print("Adding tool {0}-{1}".format(name, version), file=sys.stderr) - pkg1['tools'].append(tools[name][version]) + pkg1["tools"].append(tools[name][version]) for name in platforms: for version in platforms[name]: print("Adding platform {0}-{1}".format(name, version), file=sys.stderr) - pkg1['platforms'].append(platforms[name][version]) - - #pkg1['platforms'] = sorted(pkg1['platforms'], key=lambda k: LooseVersion(pkgVersionNormalized(k['version'])), reverse=True) - pkg1['platforms'] = sorted(pkg1['platforms'], key=lambda k: Version(pkgVersionNormalized(k['version'])), reverse=True) + pkg1["platforms"].append(platforms[name][version]) + + # pkg1["platforms"] = sorted( + # pkg1["platforms"], key=lambda k: LooseVersion(pkgVersionNormalized(k["version"])), reverse=True + # ) + + pkg1["platforms"] = sorted( + pkg1["platforms"], key=lambda k: Version(pkgVersionNormalized(k["version"])), reverse=True + ) + + json.dump({"packages": [pkg1]}, sys.stdout, indent=2) - json.dump({'packages':[pkg1]}, sys.stdout, indent=2) -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main(sys.argv)) diff --git a/.github/scripts/on-pages.sh b/.github/scripts/on-pages.sh index 124518469d2..877d036106b 100755 --- a/.github/scripts/on-pages.sh +++ 
b/.github/scripts/on-pages.sh @@ -1,12 +1,13 @@ -#/bin/bash +#!/bin/bash + set -e -function get_file_size(){ +function get_file_size { local file="$1" if [[ "$OSTYPE" == "darwin"* ]]; then - eval `stat -s "$file"` + eval "$(stat -s "$file")" local res="$?" - echo "$st_size" + echo "${st_size:?}" return $res else stat --printf="%s" "$file" @@ -15,25 +16,32 @@ function get_file_size(){ } #git_remove_from_pages -function git_remove_from_pages(){ +function git_remove_from_pages { local path=$1 - local info=`curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages"` - local type=`echo "$info" | jq -r '.type'` - if [ ! $type == "file" ]; then - if [ ! $type == "null" ]; then + local info + local type + local sha + local message + + info=$(curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages") + type=$(echo "$info" | jq -r '.type') + + if [ ! "$type" == "file" ]; then + if [ ! "$type" == "null" ]; then echo "Wrong type '$type'" else echo "File is not on Pages" fi return 0 fi - local sha=`echo "$info" | jq -r '.sha'` - local message="Deleting "$(basename $path) + + sha=$(echo "$info" | jq -r '.sha') + message="Deleting "$(basename "$path") local json="{\"branch\":\"gh-pages\",\"message\":\"$message\",\"sha\":\"$sha\"}" echo "$json" | curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.raw+json" -X DELETE --data @- "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path" } -function git_upload_to_pages(){ +function git_upload_to_pages { local path=$1 local src=$2 @@ -42,41 +50,50 @@ function git_upload_to_pages(){ return 1 fi - local info=`curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages"` - local type=`echo "$info" | jq -r '.type'` - local message=$(basename $path) + local info + local type + local message local sha="" local content="" - if [ $type == "file" ]; then - sha=`echo "$info" | jq -r '.sha'` + info=$(curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages") + type=$(echo "$info" | jq -r '.type') + message=$(basename "$path") + + if [ "$type" == "file" ]; then + sha=$(echo "$info" | jq -r '.sha') sha=",\"sha\":\"$sha\"" message="Updating $message" - elif [ ! $type == "null" ]; then + elif [ ! "$type" == "null" ]; then >&2 echo "Wrong type '$type'" return 1 else message="Creating $message" fi - content=`base64 -i "$src"` + content=$(base64 -i "$src") data="{\"branch\":\"gh-pages\",\"message\":\"$message\",\"content\":\"$content\"$sha}" echo "$data" | curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.raw+json" -X PUT --data @- "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path" } -function git_safe_upload_to_pages(){ +function git_safe_upload_to_pages { local path=$1 local file="$2" - local name=$(basename "$file") - local size=`get_file_size "$file"` - local upload_res=`git_upload_to_pages "$path" "$file"` - if [ $? -ne 0 ]; then + local name + local size + local upload_res + + name=$(basename "$file") + size=$(get_file_size "$file") + + if ! 
upload_res=$(git_upload_to_pages "$path" "$file"); then >&2 echo "ERROR: Failed to upload '$name' ($?)" return 1 fi - up_size=`echo "$upload_res" | jq -r '.content.size'` - if [ $up_size -ne $size ]; then + + up_size=$(echo "$upload_res" | jq -r '.content.size') + if [ "$up_size" -ne "$size" ]; then >&2 echo "ERROR: Uploaded size does not match! $up_size != $size" #git_delete_asset return 1 diff --git a/.github/scripts/on-push.sh b/.github/scripts/on-push.sh index 73d9eeee398..5158df3cc9b 100755 --- a/.github/scripts/on-push.sh +++ b/.github/scripts/on-push.sh @@ -4,43 +4,44 @@ set -e export ARDUINO_BUILD_DIR="$HOME/.arduino/build.tmp" -function build(){ +function build { local target=$1 local chunk_index=$2 local chunks_cnt=$3 local build_log=$4 local sketches_file=$5 - shift; shift; shift; shift; shift; - local sketches=$* + shift 5 + local sketches=("$@") local BUILD_SKETCH="${SCRIPTS_DIR}/sketch_utils.sh build" local BUILD_SKETCHES="${SCRIPTS_DIR}/sketch_utils.sh chunk_build" - local args="-ai $ARDUINO_IDE_PATH -au $ARDUINO_USR_PATH" - - args+=" -t $target" + local args=("-ai" "$ARDUINO_IDE_PATH" "-au" "$ARDUINO_USR_PATH" "-t" "$target") if [ "$OS_IS_LINUX" == "1" ]; then - args+=" -p $ARDUINO_ESP32_PATH/libraries" - args+=" -i $chunk_index -m $chunks_cnt" + args+=("-p" "$ARDUINO_ESP32_PATH/libraries" "-i" "$chunk_index" "-m" "$chunks_cnt") if [ -n "$sketches_file" ]; then - args+=" -f $sketches_file" + args+=("-f" "$sketches_file") fi - if [ $build_log -eq 1 ]; then - args+=" -l $build_log" + if [ "$build_log" -eq 1 ]; then + args+=("-l" "$build_log") fi - ${BUILD_SKETCHES} ${args} + ${BUILD_SKETCHES} "${args[@]}" else - for sketch in ${sketches}; do - local sargs="$args -s $(dirname $sketch)" + for sketch in "${sketches[@]}"; do + local sargs=("${args[@]}") + local ctags_version + local preprocessor_version + sargs+=("-s" "$(dirname "$sketch")") if [ "$OS_IS_WINDOWS" == "1" ] && [ -d "$ARDUINO_IDE_PATH/tools-builder" ]; then - local ctags_version=`ls "$ARDUINO_IDE_PATH/tools-builder/ctags/"` - local preprocessor_version=`ls "$ARDUINO_IDE_PATH/tools-builder/arduino-preprocessor/"` - win_opts="-prefs=runtime.tools.ctags.path=$ARDUINO_IDE_PATH/tools-builder/ctags/$ctags_version - -prefs=runtime.tools.arduino-preprocessor.path=$ARDUINO_IDE_PATH/tools-builder/arduino-preprocessor/$preprocessor_version" - sargs+=" ${win_opts}" + ctags_version=$(ls "$ARDUINO_IDE_PATH/tools-builder/ctags/") + preprocessor_version=$(ls "$ARDUINO_IDE_PATH/tools-builder/arduino-preprocessor/") + sargs+=( + "-prefs=runtime.tools.ctags.path=$ARDUINO_IDE_PATH/tools-builder/ctags/$ctags_version" + "-prefs=runtime.tools.arduino-preprocessor.path=$ARDUINO_IDE_PATH/tools-builder/arduino-preprocessor/$preprocessor_version" + ) fi - ${BUILD_SKETCH} ${sargs} + ${BUILD_SKETCH} "${sargs[@]}" done fi } @@ -73,40 +74,40 @@ fi SCRIPTS_DIR="./.github/scripts" if [ "$BUILD_PIO" -eq 0 ]; then - source ${SCRIPTS_DIR}/install-arduino-cli.sh - source ${SCRIPTS_DIR}/install-arduino-core-esp32.sh - - SKETCHES_ESP32="\ - $ARDUINO_ESP32_PATH/libraries/NetworkClientSecure/examples/WiFiClientSecure/WiFiClientSecure.ino\ - $ARDUINO_ESP32_PATH/libraries/BLE/examples/Server/Server.ino\ - $ARDUINO_ESP32_PATH/libraries/ESP32/examples/Camera/CameraWebServer/CameraWebServer.ino\ - $ARDUINO_ESP32_PATH/libraries/Insights/examples/MinimalDiagnostics/MinimalDiagnostics.ino\ - " + source "${SCRIPTS_DIR}/install-arduino-cli.sh" + source "${SCRIPTS_DIR}/install-arduino-core-esp32.sh" + + SKETCHES_ESP32=( + 
"$ARDUINO_ESP32_PATH/libraries/NetworkClientSecure/examples/WiFiClientSecure/WiFiClientSecure.ino" + "$ARDUINO_ESP32_PATH/libraries/BLE/examples/Server/Server.ino" + "$ARDUINO_ESP32_PATH/libraries/ESP32/examples/Camera/CameraWebServer/CameraWebServer.ino" + "$ARDUINO_ESP32_PATH/libraries/Insights/examples/MinimalDiagnostics/MinimalDiagnostics.ino" + ) #create sizes_file sizes_file="$GITHUB_WORKSPACE/cli_compile_$CHUNK_INDEX.json" if [ "$BUILD_LOG" -eq 1 ]; then #create sizes_file and echo start of JSON array with "boards" key - echo "{\"boards\": [" > $sizes_file + echo "{\"boards\": [" > "$sizes_file" fi #build sketches for different targets - build "esp32p4" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$SKETCHES_FILE" "$SKETCHES_ESP32" - build "esp32s3" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$SKETCHES_FILE" "$SKETCHES_ESP32" - build "esp32s2" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$SKETCHES_FILE" "$SKETCHES_ESP32" - build "esp32c3" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$SKETCHES_FILE" "$SKETCHES_ESP32" - build "esp32c6" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$SKETCHES_FILE" "$SKETCHES_ESP32" - build "esp32h2" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$SKETCHES_FILE" "$SKETCHES_ESP32" - build "esp32" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$SKETCHES_FILE" "$SKETCHES_ESP32" + build "esp32p4" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}" + build "esp32s3" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}" + build "esp32s2" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}" + build "esp32c3" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}" + build "esp32c6" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}" + build "esp32h2" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}" + build "esp32" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}" if [ "$BUILD_LOG" -eq 1 ]; then #remove last comma from the last JSON object sed -i '$ s/,$//' "$sizes_file" #echo end of JSON array - echo "]}" >> $sizes_file + echo "]}" >> "$sizes_file" fi else - source ${SCRIPTS_DIR}/install-platformio-esp32.sh + source "${SCRIPTS_DIR}/install-platformio-esp32.sh" # PlatformIO ESP32 Test BOARD="esp32dev" OPTIONS="board_build.partitions = huge_app.csv" @@ -117,8 +118,7 @@ else build_pio_sketch "$BOARD" "$OPTIONS" "$PLATFORMIO_ESP32_PATH/libraries/ESP32/examples/Camera/CameraWebServer/CameraWebServer.ino" # Basic sanity testing for other series - for board in "esp32-c3-devkitm-1" "esp32-s2-saola-1" "esp32-s3-devkitc-1" - do + for board in "esp32-c3-devkitm-1" "esp32-s2-saola-1" "esp32-s3-devkitc-1"; do python -m platformio ci --board "$board" "$PLATFORMIO_ESP32_PATH/libraries/WiFi/examples/WiFiClient" --project-option="board_build.partitions = huge_app.csv" done diff --git a/.github/scripts/on-release.sh b/.github/scripts/on-release.sh index bbb46fa5563..119c66e1eab 100755 --- a/.github/scripts/on-release.sh +++ b/.github/scripts/on-release.sh @@ -1,29 +1,34 @@ #!/bin/bash +# Disable shellcheck warning about using 'cat' to read a file. +# Disable shellcheck warning about using individual redirections for each command. +# Disable shellcheck warning about $? uses. +# shellcheck disable=SC2002,SC2129,SC2181,SC2319 -if [ ! $GITHUB_EVENT_NAME == "release" ]; then +if [ ! "$GITHUB_EVENT_NAME" == "release" ]; then echo "Wrong event '$GITHUB_EVENT_NAME'!" 
exit 1 fi -EVENT_JSON=`cat $GITHUB_EVENT_PATH` +EVENT_JSON=$(cat "$GITHUB_EVENT_PATH") -action=`echo $EVENT_JSON | jq -r '.action'` -if [ ! $action == "published" ]; then +action=$(echo "$EVENT_JSON" | jq -r '.action') +if [ ! "$action" == "published" ]; then echo "Wrong action '$action'. Exiting now..." exit 0 fi -draft=`echo $EVENT_JSON | jq -r '.release.draft'` -if [ $draft == "true" ]; then +draft=$(echo "$EVENT_JSON" | jq -r '.release.draft') +if [ "$draft" == "true" ]; then echo "It's a draft release. Exiting now..." exit 0 fi -RELEASE_PRE=`echo $EVENT_JSON | jq -r '.release.prerelease'` -RELEASE_TAG=`echo $EVENT_JSON | jq -r '.release.tag_name'` -RELEASE_BRANCH=`echo $EVENT_JSON | jq -r '.release.target_commitish'` -RELEASE_ID=`echo $EVENT_JSON | jq -r '.release.id'` +RELEASE_PRE=$(echo "$EVENT_JSON" | jq -r '.release.prerelease') +RELEASE_TAG=$(echo "$EVENT_JSON" | jq -r '.release.tag_name') +RELEASE_BRANCH=$(echo "$EVENT_JSON" | jq -r '.release.target_commitish') +RELEASE_ID=$(echo "$EVENT_JSON" | jq -r '.release.id') +SCRIPTS_DIR="./.github/scripts" OUTPUT_DIR="$GITHUB_WORKSPACE/build" PACKAGE_NAME="esp32-$RELEASE_TAG" PACKAGE_JSON_MERGE="$GITHUB_WORKSPACE/.github/scripts/merge_packages.py" @@ -36,17 +41,23 @@ echo "Action: $action, Branch: $RELEASE_BRANCH, ID: $RELEASE_ID" echo "Tag: $RELEASE_TAG, Draft: $draft, Pre-Release: $RELEASE_PRE" # Try extracting something like a JSON with a "boards" array/element and "vendor" fields -BOARDS=`echo $RELEASE_BODY | grep -Pzo '(?s){.*}' | jq -r '.boards[]? // .boards? // empty' | xargs echo -n 2>/dev/null` -VENDOR=`echo $RELEASE_BODY | grep -Pzo '(?s){.*}' | jq -r '.vendor? // empty' | xargs echo -n 2>/dev/null` -if ! [ -z "${BOARDS}" ]; then echo "Releasing board(s): $BOARDS" ; fi -if ! [ -z "${VENDOR}" ]; then echo "Setting packager: $VENDOR" ; fi +BOARDS=$(echo "$RELEASE_BODY" | grep -Pzo '(?s){.*}' | jq -r '.boards[]? // .boards? // empty' | xargs echo -n 2>/dev/null) +VENDOR=$(echo "$RELEASE_BODY" | grep -Pzo '(?s){.*}' | jq -r '.vendor? // empty' | xargs echo -n 2>/dev/null) -function get_file_size(){ +if [ -n "${BOARDS}" ]; then + echo "Releasing board(s): $BOARDS" +fi + +if [ -n "${VENDOR}" ]; then + echo "Setting packager: $VENDOR" +fi + +function get_file_size { local file="$1" if [[ "$OSTYPE" == "darwin"* ]]; then - eval `stat -s "$file"` + eval "$(stat -s "$file")" local res="$?" - echo "$st_size" + echo "${st_size:?}" return $res else stat --printf="%s" "$file" @@ -54,23 +65,29 @@ function get_file_size(){ fi } -function git_upload_asset(){ - local name=$(basename "$1") +function git_upload_asset { + local name + name=$(basename "$1") # local mime=$(file -b --mime-type "$1") curl -k -X POST -sH "Authorization: token $GITHUB_TOKEN" -H "Content-Type: application/octet-stream" --data-binary @"$1" "https://uploads.github.com/repos/$GITHUB_REPOSITORY/releases/$RELEASE_ID/assets?name=$name" } -function git_safe_upload_asset(){ +function git_safe_upload_asset { local file="$1" - local name=$(basename "$file") - local size=`get_file_size "$file"` - local upload_res=`git_upload_asset "$file"` - if [ $? -ne 0 ]; then + local name + local size + local upload_res + + name=$(basename "$file") + size=$(get_file_size "$file") + + if ! 
upload_res=$(git_upload_asset "$file"); then >&2 echo "ERROR: Failed to upload '$name' ($?)" return 1 fi - up_size=`echo "$upload_res" | jq -r '.size'` - if [ $up_size -ne $size ]; then + + up_size=$(echo "$upload_res" | jq -r '.size') + if [ "$up_size" -ne "$size" ]; then >&2 echo "ERROR: Uploaded size does not match! $up_size != $size" #git_delete_asset return 1 @@ -79,7 +96,7 @@ function git_safe_upload_asset(){ return $? } -function git_upload_to_pages(){ +function git_upload_to_pages { local path=$1 local src=$2 @@ -88,41 +105,50 @@ function git_upload_to_pages(){ return 1 fi - local info=`curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages"` - local type=`echo "$info" | jq -r '.type'` - local message=$(basename $path) + local info + local type + local message local sha="" local content="" - if [ $type == "file" ]; then - sha=`echo "$info" | jq -r '.sha'` + info=$(curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages") + type=$(echo "$info" | jq -r '.type') + message=$(basename "$path") + + if [ "$type" == "file" ]; then + sha=$(echo "$info" | jq -r '.sha') sha=",\"sha\":\"$sha\"" message="Updating $message" - elif [ ! $type == "null" ]; then + elif [ ! "$type" == "null" ]; then >&2 echo "Wrong type '$type'" return 1 else message="Creating $message" fi - content=`base64 -i "$src"` + content=$(base64 -i "$src") data="{\"branch\":\"gh-pages\",\"message\":\"$message\",\"content\":\"$content\"$sha}" echo "$data" | curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.raw+json" -X PUT --data @- "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path" } -function git_safe_upload_to_pages(){ +function git_safe_upload_to_pages { local path=$1 local file="$2" - local name=$(basename "$file") - local size=`get_file_size "$file"` - local upload_res=`git_upload_to_pages "$path" "$file"` - if [ $? -ne 0 ]; then + local name + local size + local upload_res + + name=$(basename "$file") + size=$(get_file_size "$file") + + if ! upload_res=$(git_upload_to_pages "$path" "$file"); then >&2 echo "ERROR: Failed to upload '$name' ($?)" return 1 fi - up_size=`echo "$upload_res" | jq -r '.content.size'` - if [ $up_size -ne $size ]; then + + up_size=$(echo "$upload_res" | jq -r '.content.size') + if [ "$up_size" -ne "$size" ]; then >&2 echo "ERROR: Uploaded size does not match! $up_size != $size" #git_delete_asset return 1 @@ -131,15 +157,20 @@ function git_safe_upload_to_pages(){ return $? } -function merge_package_json(){ +function merge_package_json { local jsonLink=$1 local jsonOut=$2 local old_json=$OUTPUT_DIR/oldJson.json local merged_json=$OUTPUT_DIR/mergedJson.json + local error_code=0 echo "Downloading previous JSON $jsonLink ..." curl -L -o "$old_json" "https://github.com/$GITHUB_REPOSITORY/releases/download/$jsonLink?access_token=$GITHUB_TOKEN" 2>/dev/null - if [ $? -ne 0 ]; then echo "ERROR: Download Failed! $?"; exit 1; fi + error_code=$? + if [ $error_code -ne 0 ]; then + echo "ERROR: Download Failed! $error_code" + exit 1 + fi echo "Creating new JSON ..." set +e @@ -147,7 +178,7 @@ function merge_package_json(){ set -e set -v - if [ ! -s $merged_json ]; then + if [ ! 
-s "$merged_json" ]; then rm -f "$merged_json" echo "Nothing to merge" else @@ -188,9 +219,10 @@ else done # Copy only relevant variant files mkdir "$PKG_DIR/variants/" - for variant in `cat ${PKG_DIR}/boards.txt | grep "\.variant=" | cut -d= -f2` ; do + board_list=$(cat "${PKG_DIR}"/boards.txt | grep "\.variant=" | cut -d= -f2) + while IFS= read -r variant; do cp -Rf "$GITHUB_WORKSPACE/variants/${variant}" "$PKG_DIR/variants/" - done + done <<< "$board_list" fi cp -f "$GITHUB_WORKSPACE/CMakeLists.txt" "$PKG_DIR/" cp -f "$GITHUB_WORKSPACE/idf_component.yml" "$PKG_DIR/" @@ -233,34 +265,36 @@ sed "s/{runtime\.platform\.path}.tools.xtensa-esp-elf/\\{runtime.tools.$X32TC_NE sed 's/{runtime\.platform\.path}.tools.riscv32-esp-elf-gdb/\{runtime.tools.riscv32-esp-elf-gdb.path\}/g' | \ sed "s/{runtime\.platform\.path}.tools.riscv32-esp-elf/\\{runtime.tools.$RVTC_NEW_NAME.path\\}/g" | \ sed 's/{runtime\.platform\.path}.tools.esptool/\{runtime.tools.esptool_py.path\}/g' | \ -sed 's/{runtime\.platform\.path}.tools.openocd-esp32/\{runtime.tools.openocd-esp32.path\}/g' \ - > "$PKG_DIR/platform.txt" +sed 's/{runtime\.platform\.path}.tools.openocd-esp32/\{runtime.tools.openocd-esp32.path\}/g' > "$PKG_DIR/platform.txt" -if ! [ -z ${VENDOR} ]; then +if [ -n "${VENDOR}" ]; then # Append vendor name to platform.txt to create a separate section sed -i "/^name=.*/s/$/ ($VENDOR)/" "$PKG_DIR/platform.txt" fi # Add header with version information echo "Generating core_version.h ..." -ver_define=`echo $RELEASE_TAG | tr "[:lower:].\055" "[:upper:]_"` -ver_hex=`git -C "$GITHUB_WORKSPACE" rev-parse --short=8 HEAD 2>/dev/null` -echo \#define ARDUINO_ESP32_GIT_VER 0x$ver_hex > "$PKG_DIR/cores/esp32/core_version.h" -echo \#define ARDUINO_ESP32_GIT_DESC `git -C "$GITHUB_WORKSPACE" describe --tags 2>/dev/null` >> "$PKG_DIR/cores/esp32/core_version.h" -echo \#define ARDUINO_ESP32_RELEASE_$ver_define >> "$PKG_DIR/cores/esp32/core_version.h" -echo \#define ARDUINO_ESP32_RELEASE \"$ver_define\" >> "$PKG_DIR/cores/esp32/core_version.h" +ver_define=$(echo "$RELEASE_TAG" | tr "[:lower:].\055" "[:upper:]_") +ver_hex=$(git -C "$GITHUB_WORKSPACE" rev-parse --short=8 HEAD 2>/dev/null) +echo \#define ARDUINO_ESP32_GIT_VER 0x"$ver_hex" > "$PKG_DIR/cores/esp32/core_version.h" +echo \#define ARDUINO_ESP32_GIT_DESC "$(git -C "$GITHUB_WORKSPACE" describe --tags 2>/dev/null)" >> "$PKG_DIR/cores/esp32/core_version.h" +echo \#define ARDUINO_ESP32_RELEASE_"$ver_define" >> "$PKG_DIR/cores/esp32/core_version.h" +echo \#define ARDUINO_ESP32_RELEASE \""$ver_define"\" >> "$PKG_DIR/cores/esp32/core_version.h" # Compress package folder echo "Creating ZIP ..." pushd "$OUTPUT_DIR" >/dev/null zip -qr "$PACKAGE_ZIP" "$PACKAGE_NAME" -if [ $? -ne 0 ]; then echo "ERROR: Failed to create $PACKAGE_ZIP ($?)"; exit 1; fi +if [ $? -ne 0 ]; then + echo "ERROR: Failed to create $PACKAGE_ZIP ($?)" + exit 1 +fi # Calculate SHA-256 echo "Calculating SHA sum ..." PACKAGE_PATH="$OUTPUT_DIR/$PACKAGE_ZIP" -PACKAGE_SHA=`shasum -a 256 "$PACKAGE_ZIP" | cut -f 1 -d ' '` -PACKAGE_SIZE=`get_file_size "$PACKAGE_ZIP"` +PACKAGE_SHA=$(shasum -a 256 "$PACKAGE_ZIP" | cut -f 1 -d ' ') +PACKAGE_SIZE=$(get_file_size "$PACKAGE_ZIP") popd >/dev/null rm -rf "$PKG_DIR" echo "'$PACKAGE_ZIP' Created! Size: $PACKAGE_SIZE, SHA-256: $PACKAGE_SHA" @@ -268,7 +302,7 @@ echo # Upload package to release page echo "Uploading package to release page ..." 
-PACKAGE_URL=`git_safe_upload_asset "$PACKAGE_PATH"` +PACKAGE_URL=$(git_safe_upload_asset "$PACKAGE_PATH") echo "Package Uploaded" echo "Download URL: $PACKAGE_URL" echo @@ -276,9 +310,9 @@ echo ## ## TEMP WORKAROUND FOR RV32 LONG PATH ON WINDOWS ## -RVTC_VERSION=`cat $PACKAGE_JSON_TEMPLATE | jq -r ".packages[0].platforms[0].toolsDependencies[] | select(.name == \"$RVTC_NAME\") | .version" | cut -d '_' -f 2` +RVTC_VERSION=$(cat "$PACKAGE_JSON_TEMPLATE" | jq -r ".packages[0].platforms[0].toolsDependencies[] | select(.name == \"$RVTC_NAME\") | .version" | cut -d '_' -f 2) # RVTC_VERSION=`date -j -f '%Y%m%d' "$RVTC_VERSION" '+%y%m'` # MacOS -RVTC_VERSION=`date -d "$RVTC_VERSION" '+%y%m'` +RVTC_VERSION=$(date -d "$RVTC_VERSION" '+%y%m') rvtc_jq_arg="\ (.packages[0].platforms[0].toolsDependencies[] | select(.name==\"$RVTC_NAME\")).version = \"$RVTC_VERSION\" |\ (.packages[0].platforms[0].toolsDependencies[] | select(.name==\"$RVTC_NAME\")).name = \"$RVTC_NEW_NAME\" |\ @@ -303,17 +337,20 @@ jq_arg=".packages[0].platforms[0].version = \"$RELEASE_TAG\" | \ .packages[0].platforms[0].checksum = \"SHA-256:$PACKAGE_SHA\"" # Generate package JSONs -echo "Genarating $PACKAGE_JSON_DEV ..." +echo "Generating $PACKAGE_JSON_DEV ..." cat "$PACKAGE_JSON_TEMPLATE" | jq "$jq_arg" > "$OUTPUT_DIR/$PACKAGE_JSON_DEV" if [ "$RELEASE_PRE" == "false" ]; then - echo "Genarating $PACKAGE_JSON_REL ..." + echo "Generating $PACKAGE_JSON_REL ..." cat "$PACKAGE_JSON_TEMPLATE" | jq "$jq_arg" > "$OUTPUT_DIR/$PACKAGE_JSON_REL" fi # Figure out the last release or pre-release echo "Getting previous releases ..." -releasesJson=`curl -sH "Authorization: token $GITHUB_TOKEN" "https://api.github.com/repos/$GITHUB_REPOSITORY/releases" 2>/dev/null` -if [ $? -ne 0 ]; then echo "ERROR: Get Releases Failed! ($?)"; exit 1; fi +releasesJson=$(curl -sH "Authorization: token $GITHUB_TOKEN" "https://api.github.com/repos/$GITHUB_REPOSITORY/releases" 2>/dev/null) +if [ $? -ne 0 ]; then + echo "ERROR: Get Releases Failed! ($?)" + exit 1 +fi set +e prev_release=$(echo "$releasesJson" | jq -e -r ". | map(select(.draft == false and .prerelease == false)) | sort_by(.published_at | - fromdateiso8601) | .[0].tag_name") @@ -333,13 +370,13 @@ echo "Previous (any)release: $prev_any_release" echo # Merge package JSONs with previous releases -if [ ! -z "$prev_any_release" ] && [ "$prev_any_release" != "null" ]; then +if [ -n "$prev_any_release" ] && [ "$prev_any_release" != "null" ]; then echo "Merging with JSON from $prev_any_release ..." merge_package_json "$prev_any_release/$PACKAGE_JSON_DEV" "$OUTPUT_DIR/$PACKAGE_JSON_DEV" fi if [ "$RELEASE_PRE" == "false" ]; then - if [ ! -z "$prev_release" ] && [ "$prev_release" != "null" ]; then + if [ -n "$prev_release" ] && [ "$prev_release" != "null" ]; then echo "Merging with JSON from $prev_release ..." merge_package_json "$prev_release/$PACKAGE_JSON_REL" "$OUTPUT_DIR/$PACKAGE_JSON_REL" fi @@ -349,21 +386,30 @@ fi echo "Installing arduino-cli ..." export PATH="/home/runner/bin:$PATH" -source ./.github/scripts/install-arduino-cli.sh +source "${SCRIPTS_DIR}/install-arduino-cli.sh" echo "Testing $PACKAGE_JSON_DEV install ..." echo "Installing esp32 ..." arduino-cli core install esp32:esp32 --additional-urls "file://$OUTPUT_DIR/$PACKAGE_JSON_DEV" -if [ $? -ne 0 ]; then echo "ERROR: Failed to install esp32 ($?)"; exit 1; fi +if [ $? -ne 0 ]; then + echo "ERROR: Failed to install esp32 ($?)" + exit 1 +fi echo "Compiling example ..." 
-arduino-cli compile --fqbn esp32:esp32:esp32 $GITHUB_WORKSPACE/libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino -if [ $? -ne 0 ]; then echo "ERROR: Failed to compile example ($?)"; exit 1; fi +arduino-cli compile --fqbn esp32:esp32:esp32 "$GITHUB_WORKSPACE"/libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino +if [ $? -ne 0 ]; then + echo "ERROR: Failed to compile example ($?)" + exit 1 +fi echo "Uninstalling esp32 ..." arduino-cli core uninstall esp32:esp32 -if [ $? -ne 0 ]; then echo "ERROR: Failed to uninstall esp32 ($?)"; exit 1; fi +if [ $? -ne 0 ]; then + echo "ERROR: Failed to uninstall esp32 ($?)" + exit 1 +fi echo "Test successful!" @@ -372,15 +418,24 @@ if [ "$RELEASE_PRE" == "false" ]; then echo "Installing esp32 ..." arduino-cli core install esp32:esp32 --additional-urls "file://$OUTPUT_DIR/$PACKAGE_JSON_REL" - if [ $? -ne 0 ]; then echo "ERROR: Failed to install esp32 ($?)"; exit 1; fi + if [ $? -ne 0 ]; then + echo "ERROR: Failed to install esp32 ($?)" + exit 1 + fi echo "Compiling example ..." - arduino-cli compile --fqbn esp32:esp32:esp32 $GITHUB_WORKSPACE/libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino - if [ $? -ne 0 ]; then echo "ERROR: Failed to compile example ($?)"; exit 1; fi + arduino-cli compile --fqbn esp32:esp32:esp32 "$GITHUB_WORKSPACE"/libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino + if [ $? -ne 0 ]; then + echo "ERROR: Failed to compile example ($?)" + exit 1 + fi echo "Uninstalling esp32 ..." arduino-cli core uninstall esp32:esp32 - if [ $? -ne 0 ]; then echo "ERROR: Failed to uninstall esp32 ($?)"; exit 1; fi + if [ $? -ne 0 ]; then + echo "ERROR: Failed to uninstall esp32 ($?)" + exit 1 + fi echo "Test successful!" fi @@ -388,13 +443,13 @@ fi # Upload package JSONs echo "Uploading $PACKAGE_JSON_DEV ..." -echo "Download URL: "`git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_DEV"` -echo "Pages URL: "`git_safe_upload_to_pages "$PACKAGE_JSON_DEV" "$OUTPUT_DIR/$PACKAGE_JSON_DEV"` +echo "Download URL: $(git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_DEV")" +echo "Pages URL: $(git_safe_upload_to_pages "$PACKAGE_JSON_DEV" "$OUTPUT_DIR/$PACKAGE_JSON_DEV")" echo if [ "$RELEASE_PRE" == "false" ]; then echo "Uploading $PACKAGE_JSON_REL ..." - echo "Download URL: "`git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_REL"` - echo "Pages URL: "`git_safe_upload_to_pages "$PACKAGE_JSON_REL" "$OUTPUT_DIR/$PACKAGE_JSON_REL"` + echo "Download URL: $(git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_REL")" + echo "Pages URL: $(git_safe_upload_to_pages "$PACKAGE_JSON_REL" "$OUTPUT_DIR/$PACKAGE_JSON_REL")" echo fi diff --git a/.github/scripts/set_push_chunks.sh b/.github/scripts/set_push_chunks.sh index 11a93a7159d..047cae6efd8 100644 --- a/.github/scripts/set_push_chunks.sh +++ b/.github/scripts/set_push_chunks.sh @@ -11,23 +11,23 @@ elif [[ $LIB_CHANGED == 'true' ]]; then echo "Libraries changed. Building only affected sketches." if [[ $NETWORKING_CHANGED == 'true' ]]; then echo "Networking libraries changed. Building networking related sketches." 
- networking_sketches="$(find libraries/WiFi -name *.ino) " - networking_sketches+="$(find libraries/Ethernet -name *.ino) " - networking_sketches+="$(find libraries/PPP -name *.ino) " - networking_sketches+="$(find libraries/NetworkClientSecure -name *.ino) " - networking_sketches+="$(find libraries/WebServer -name *.ino) " + networking_sketches="$(find libraries/WiFi -name '*.ino') " + networking_sketches+="$(find libraries/Ethernet -name '*.ino') " + networking_sketches+="$(find libraries/PPP -name '*.ino') " + networking_sketches+="$(find libraries/NetworkClientSecure -name '*.ino') " + networking_sketches+="$(find libraries/WebServer -name '*.ino') " fi if [[ $FS_CHANGED == 'true' ]]; then echo "FS libraries changed. Building FS related sketches." - fs_sketches="$(find libraries/SD -name *.ino) " - fs_sketches+="$(find libraries/SD_MMC -name *.ino) " - fs_sketches+="$(find libraries/SPIFFS -name *.ino) " - fs_sketches+="$(find libraries/LittleFS -name *.ino) " - fs_sketches+="$(find libraries/FFat -name *.ino) " + fs_sketches="$(find libraries/SD -name '*.ino') " + fs_sketches+="$(find libraries/SD_MMC -name '*.ino') " + fs_sketches+="$(find libraries/SPIFFS -name '*.ino') " + fs_sketches+="$(find libraries/LittleFS -name '*.ino') " + fs_sketches+="$(find libraries/FFat -name '*.ino') " fi sketches="$networking_sketches $fs_sketches" for file in $LIB_FILES; do - lib=$(echo $file | awk -F "/" '{print $1"/"$2}') + lib=$(echo "$file" | awk -F "/" '{print $1"/"$2}') if [[ "$file" == *.ino ]]; then # If file ends with .ino, add it to the list of sketches echo "Sketch found: $file" @@ -36,14 +36,14 @@ elif [[ $LIB_CHANGED == 'true' ]]; then # If file is inside the src directory, find all sketches in the lib/examples directory echo "Library src file found: $file" if [[ -d $lib/examples ]]; then - lib_sketches=$(find $lib/examples -name *.ino) + lib_sketches=$(find "$lib"/examples -name '*.ino') sketches+="$lib_sketches " echo "Library sketches: $lib_sketches" fi else # If file is in a example folder but it is not a sketch, find all sketches in the current directory echo "File in example folder found: $file" - sketch=$(find $(dirname $file) -name *.ino) + sketch=$(find "$(dirname "$file")" -name '*.ino') sketches+="$sketch " echo "Sketch in example folder: $sketch" fi @@ -53,9 +53,9 @@ fi if [[ -n $sketches ]]; then # Remove duplicates - sketches=$(echo $sketches | tr ' ' '\n' | sort | uniq) + sketches=$(echo "$sketches" | tr ' ' '\n' | sort | uniq) for sketch in $sketches; do - echo $sketch >> sketches_found.txt + echo "$sketch" >> sketches_found.txt chunks_count=$((chunks_count+1)) done echo "Number of sketches found: $chunks_count" @@ -69,15 +69,17 @@ if [[ -n $sketches ]]; then fi chunks='["0"' -for i in $(seq 1 $(( $chunks_count - 1 )) ); do +for i in $(seq 1 $(( chunks_count - 1 )) ); do chunks+=",\"$i\"" done chunks+="]" -echo "build_all=$build_all" >> $GITHUB_OUTPUT -echo "build_libraries=$BUILD_LIBRARIES" >> $GITHUB_OUTPUT -echo "build_static_sketches=$BUILD_STATIC_SKETCHES" >> $GITHUB_OUTPUT -echo "build_idf=$BUILD_IDF" >> $GITHUB_OUTPUT -echo "build_platformio=$BUILD_PLATFORMIO" >> $GITHUB_OUTPUT -echo "chunk_count=$chunks_count" >> $GITHUB_OUTPUT -echo "chunks=$chunks" >> $GITHUB_OUTPUT +{ + echo "build_all=$build_all" + echo "build_libraries=$BUILD_LIBRARIES" + echo "build_static_sketches=$BUILD_STATIC_SKETCHES" + echo "build_idf=$BUILD_IDF" + echo "build_platformio=$BUILD_PLATFORMIO" + echo "chunk_count=$chunks_count" + echo "chunks=$chunks" +} >> "$GITHUB_OUTPUT" diff --git 
a/.github/scripts/sketch_utils.sh b/.github/scripts/sketch_utils.sh index 385322f7dfc..3e6ceb675e9 100755 --- a/.github/scripts/sketch_utils.sh +++ b/.github/scripts/sketch_utils.sh @@ -8,10 +8,12 @@ else SDKCONFIG_DIR="tools/esp32-arduino-libs" fi -function check_requirements(){ # check_requirements +function check_requirements { # check_requirements local sketchdir=$1 local sdkconfig_path=$2 local has_requirements=1 + local requirements + local requirements_or if [ ! -f "$sdkconfig_path" ] || [ ! -f "$sketchdir/ci.json" ]; then echo "ERROR: sdkconfig or ci.json not found" 1>&2 @@ -19,10 +21,10 @@ function check_requirements(){ # check_requirements # CI will fail and the user will know that the sketch has a problem. else # Check if the sketch requires any configuration options (AND) - local requirements=$(jq -r '.requires[]? // empty' "$sketchdir/ci.json") + requirements=$(jq -r '.requires[]? // empty' "$sketchdir/ci.json") if [[ "$requirements" != "null" && "$requirements" != "" ]]; then for requirement in $requirements; do - requirement=$(echo $requirement | xargs) + requirement=$(echo "$requirement" | xargs) found_line=$(grep -E "^$requirement" "$sdkconfig_path") if [[ "$found_line" == "" ]]; then has_requirements=0 @@ -31,11 +33,11 @@ function check_requirements(){ # check_requirements fi # Check if the sketch requires any configuration options (OR) - local requirements_or=$(jq -r '.requires_any[]? // empty' "$sketchdir/ci.json") + requirements_or=$(jq -r '.requires_any[]? // empty' "$sketchdir/ci.json") if [[ "$requirements_or" != "null" && "$requirements_or" != "" ]]; then local found=false for requirement in $requirements_or; do - requirement=$(echo $requirement | xargs) + requirement=$(echo "$requirement" | xargs) found_line=$(grep -E "^$requirement" "$sdkconfig_path") if [[ "$found_line" != "" ]]; then found=true @@ -51,8 +53,8 @@ function check_requirements(){ # check_requirements echo $has_requirements } -function build_sketch(){ # build_sketch [extra-options] - while [ ! -z "$1" ]; do +function build_sketch { # build_sketch [extra-options] + while [ -n "$1" ]; do case "$1" in -ai ) shift @@ -97,10 +99,10 @@ function build_sketch(){ # build_sketch [ex shift done - xtra_opts=$* + xtra_opts=("$@") len=0 - if [ -z $sketchdir ]; then + if [ -z "$sketchdir" ]; then echo "ERROR: Sketch directory not provided" echo "$USAGE" exit 1 @@ -108,8 +110,8 @@ function build_sketch(){ # build_sketch [ex # No FQBN was passed, try to get it from other options - if [ -z $fqbn ]; then - if [ -z $target ]; then + if [ -z "$fqbn" ]; then + if [ -z "$target" ]; then echo "ERROR: Unspecified chip" echo "$USAGE" exit 1 @@ -120,25 +122,25 @@ function build_sketch(){ # build_sketch [ex # precedence. Note that the following logic also falls to the default # parameters if no arguments were passed and no file was found. - if [ -z $options ] && [ -f $sketchdir/ci.json ]; then + if [ -z "$options" ] && [ -f "$sketchdir"/ci.json ]; then # The config file could contain multiple FQBNs for one chip. If # that's the case we build one time for every FQBN. - len=`jq -r --arg target $target '.fqbn[$target] | length' $sketchdir/ci.json` - if [ $len -gt 0 ]; then - fqbn=`jq -r --arg target $target '.fqbn[$target] | sort' $sketchdir/ci.json` + len=$(jq -r --arg target "$target" '.fqbn[$target] | length' "$sketchdir"/ci.json) + if [ "$len" -gt 0 ]; then + fqbn=$(jq -r --arg target "$target" '.fqbn[$target] | sort' "$sketchdir"/ci.json) fi fi - if [ ! 
-z $options ] || [ $len -eq 0 ]; then + if [ -n "$options" ] || [ "$len" -eq 0 ]; then # Since we are passing options, we will end up with only one FQBN to # build. len=1 - if [ -f $sketchdir/ci.json ]; then - fqbn_append=`jq -r '.fqbn_append' $sketchdir/ci.json` - if [ $fqbn_append == "null" ]; then + if [ -f "$sketchdir"/ci.json ]; then + fqbn_append=$(jq -r '.fqbn_append' "$sketchdir"/ci.json) + if [ "$fqbn_append" == "null" ]; then fqbn_append="" fi fi @@ -189,6 +191,10 @@ function build_sketch(){ # build_sketch [ex [ -n "${options:-$esp32p4_opts}" ] && opt=":${options:-$esp32p4_opts}" fqbn="espressif:esp32:esp32p4$opt" ;; + *) + echo "ERROR: Invalid chip: $target" + exit 1 + ;; esac # Make it look like a JSON array. @@ -207,7 +213,7 @@ function build_sketch(){ # build_sketch [ex exit 1 fi - # The directory that will hold all the artifcats (the build directory) is + # The directory that will hold all the artifacts (the build directory) is # provided through: # 1. An env variable called ARDUINO_BUILD_DIR. # 2. Created at the sketch level as "build" in the case of a single @@ -215,17 +221,18 @@ function build_sketch(){ # build_sketch [ex # 3. Created at the sketch level as "buildX" where X is the number # of configuration built in case of a multiconfiguration test. - sketchname=$(basename $sketchdir) + sketchname=$(basename "$sketchdir") + local has_requirements - if [ -f $sketchdir/ci.json ]; then + if [ -f "$sketchdir"/ci.json ]; then # If the target is listed as false, skip the sketch. Otherwise, include it. - is_target=$(jq -r --arg target $target '.targets[$target]' $sketchdir/ci.json) + is_target=$(jq -r --arg target "$target" '.targets[$target]' "$sketchdir"/ci.json) if [[ "$is_target" == "false" ]]; then echo "Skipping $sketchname for target $target" exit 0 fi - local has_requirements=$(check_requirements "$sketchdir" "$SDKCONFIG_DIR/$target/sdkconfig") + has_requirements=$(check_requirements "$sketchdir" "$SDKCONFIG_DIR/$target/sdkconfig") if [ "$has_requirements" == "0" ]; then echo "Target $target does not meet the requirements for $sketchname. Skipping." 
exit 0 @@ -235,7 +242,7 @@ function build_sketch(){ # build_sketch [ex ARDUINO_CACHE_DIR="$HOME/.arduino/cache.tmp" if [ -n "$ARDUINO_BUILD_DIR" ]; then build_dir="$ARDUINO_BUILD_DIR" - elif [ $len -eq 1 ]; then + elif [ "$len" -eq 1 ]; then # build_dir="$sketchdir/build" build_dir="$HOME/.arduino/tests/$sketchname/build.tmp" fi @@ -244,51 +251,49 @@ function build_sketch(){ # build_sketch [ex sizes_file="$GITHUB_WORKSPACE/cli_compile_$chunk_index.json" mkdir -p "$ARDUINO_CACHE_DIR" - for i in `seq 0 $(($len - 1))` - do - if [ $len -ne 1 ]; then - # build_dir="$sketchdir/build$i" - build_dir="$HOME/.arduino/tests/$sketchname/build$i.tmp" + for i in $(seq 0 $((len - 1))); do + if [ "$len" -ne 1 ]; then + # build_dir="$sketchdir/build$i" + build_dir="$HOME/.arduino/tests/$sketchname/build$i.tmp" fi - rm -rf $build_dir - mkdir -p $build_dir + rm -rf "$build_dir" + mkdir -p "$build_dir" - currfqbn=`echo $fqbn | jq -r --argjson i $i '.[$i]'` + currfqbn=$(echo "$fqbn" | jq -r --argjson i "$i" '.[$i]') if [ -f "$ide_path/arduino-cli" ]; then echo "Building $sketchname with arduino-cli and FQBN=$currfqbn" - curroptions=`echo "$currfqbn" | cut -d':' -f4` - currfqbn=`echo "$currfqbn" | cut -d':' -f1-3` - $ide_path/arduino-cli compile \ + curroptions=$(echo "$currfqbn" | cut -d':' -f4) + currfqbn=$(echo "$currfqbn" | cut -d':' -f1-3) + "$ide_path"/arduino-cli compile \ --fqbn "$currfqbn" \ --board-options "$curroptions" \ --warnings "all" \ --build-property "compiler.warning_flags.all=-Wall -Werror=all -Wextra" \ - --build-cache-path "$ARDUINO_CACHE_DIR" \ --build-path "$build_dir" \ - $xtra_opts "${sketchdir}" \ - 2>&1 | tee $output_file + "${xtra_opts[@]}" "${sketchdir}" \ + 2>&1 | tee "$output_file" exit_status=${PIPESTATUS[0]} - if [ $exit_status -ne 0 ]; then + if [ "$exit_status" -ne 0 ]; then echo "ERROR: Compilation failed with error code $exit_status" - exit $exit_status + exit "$exit_status" fi - if [ $log_compilation ]; then + if [ -n "$log_compilation" ]; then #Extract the program storage space and dynamic memory usage in bytes and percentage in separate variables from the output, just the value without the string - flash_bytes=$(grep -oE 'Sketch uses ([0-9]+) bytes' $output_file | awk '{print $3}') - flash_percentage=$(grep -oE 'Sketch uses ([0-9]+) bytes \(([0-9]+)%\)' $output_file | awk '{print $5}' | tr -d '(%)') - ram_bytes=$(grep -oE 'Global variables use ([0-9]+) bytes' $output_file | awk '{print $4}') - ram_percentage=$(grep -oE 'Global variables use ([0-9]+) bytes \(([0-9]+)%\)' $output_file | awk '{print $6}' | tr -d '(%)') + flash_bytes=$(grep -oE 'Sketch uses ([0-9]+) bytes' "$output_file" | awk '{print $3}') + flash_percentage=$(grep -oE 'Sketch uses ([0-9]+) bytes \(([0-9]+)%\)' "$output_file" | awk '{print $5}' | tr -d '(%)') + ram_bytes=$(grep -oE 'Global variables use ([0-9]+) bytes' "$output_file" | awk '{print $4}') + ram_percentage=$(grep -oE 'Global variables use ([0-9]+) bytes \(([0-9]+)%\)' "$output_file" | awk '{print $6}' | tr -d '(%)') # Extract the directory path excluding the filename directory_path=$(dirname "$sketch") # Define the constant part constant_part="/home/runner/Arduino/hardware/espressif/esp32/libraries/" - # Extract the desired substring using sed - lib_sketch_name=$(echo "$directory_path" | sed "s|$constant_part||") + # Extract the desired substring + lib_sketch_name="${directory_path#"$constant_part"}" #append json file where key is fqbn, sketch name, sizes -> extracted values echo "{\"name\": \"$lib_sketch_name\", \"sizes\": [{ @@ -304,15 
+309,15 @@ function build_sketch(){ # build_sketch [ex echo "Building $sketchname with arduino-builder and FQBN=$currfqbn" echo "Build path = $build_dir" - $ide_path/arduino-builder -compile -logger=human -core-api-version=10810 \ - -fqbn=\"$currfqbn\" \ + "$ide_path"/arduino-builder -compile -logger=human -core-api-version=10810 \ + -fqbn=\""$currfqbn"\" \ -warnings="all" \ -tools "$ide_path/tools-builder" \ -hardware "$user_path/hardware" \ -libraries "$user_path/libraries" \ -build-cache "$ARDUINO_CACHE_DIR" \ -build-path "$build_dir" \ - $xtra_opts "${sketchdir}/${sketchname}.ino" + "${xtra_opts[@]}" "${sketchdir}/${sketchname}.ino" exit_status=$? if [ $exit_status -ne 0 ]; then @@ -339,15 +344,16 @@ function build_sketch(){ # build_sketch [ex unset options } -function count_sketches(){ # count_sketches [target] [file] [ignore-requirements] +function count_sketches { # count_sketches [target] [file] [ignore-requirements] local path=$1 local target=$2 local ignore_requirements=$3 local file=$4 + local sketches if [ $# -lt 1 ]; then - echo "ERROR: Illegal number of parameters" - echo "USAGE: ${0} count [target]" + echo "ERROR: Illegal number of parameters" + echo "USAGE: ${0} count [target]" fi rm -rf sketches.txt @@ -357,42 +363,47 @@ function count_sketches(){ # count_sketches [target] [file] [ignore-requi fi if [ -f "$file" ]; then - local sketches=$(cat $file) + sketches=$(cat "$file") else - local sketches=$(find $path -name *.ino | sort) + sketches=$(find "$path" -name '*.ino' | sort) fi local sketchnum=0 for sketch in $sketches; do - local sketchdir=$(dirname $sketch) - local sketchdirname=$(basename $sketchdir) - local sketchname=$(basename $sketch) + local sketchdir + local sketchdirname + local sketchname + local has_requirements + + sketchdir=$(dirname "$sketch") + sketchdirname=$(basename "$sketchdir") + sketchname=$(basename "$sketch") + if [[ "$sketchdirname.ino" != "$sketchname" ]]; then continue elif [[ -n $target ]] && [[ -f $sketchdir/ci.json ]]; then # If the target is listed as false, skip the sketch. Otherwise, include it. - is_target=$(jq -r --arg target $target '.targets[$target]' $sketchdir/ci.json) + is_target=$(jq -r --arg target "$target" '.targets[$target]' "$sketchdir"/ci.json) if [[ "$is_target" == "false" ]]; then continue fi if [ "$ignore_requirements" != "1" ]; then - local has_requirements=$(check_requirements "$sketchdir" "$SDKCONFIG_DIR/$target/sdkconfig") + has_requirements=$(check_requirements "$sketchdir" "$SDKCONFIG_DIR/$target/sdkconfig") if [ "$has_requirements" == "0" ]; then continue fi fi fi - echo $sketch >> sketches.txt - sketchnum=$(($sketchnum + 1)) + echo "$sketch" >> sketches.txt + sketchnum=$((sketchnum + 1)) done return $sketchnum } -function build_sketches(){ # build_sketches [extra-options] - - local args="" - while [ ! 
-z "$1" ]; do +function build_sketches { # build_sketches [extra-options] + local args=() + while [ -n "$1" ]; do case $1 in -ai ) shift @@ -405,12 +416,12 @@ function build_sketches(){ # build_sketches info/wokwi_types.txt echo "[$targets]" > info/targets.txt -echo "build-types=[$build_types]" >> $GITHUB_OUTPUT -echo "hw-types=[$hw_types]" >> $GITHUB_OUTPUT -echo "wokwi-types=[$wokwi_types]" >> $GITHUB_OUTPUT -echo "qemu-types=[$qemu_types]" >> $GITHUB_OUTPUT -echo "targets=[$targets]" >> $GITHUB_OUTPUT +{ + echo "build-types=[$build_types]" + echo "hw-types=[$hw_types]" + echo "wokwi-types=[$wokwi_types]" + echo "qemu-types=[$qemu_types]" + echo "targets=[$targets]" +} >> "$GITHUB_OUTPUT" diff --git a/.github/scripts/tests_run.sh b/.github/scripts/tests_run.sh index 16b0f2fb500..513fd16b371 100755 --- a/.github/scripts/tests_run.sh +++ b/.github/scripts/tests_run.sh @@ -1,126 +1,130 @@ #!/bin/bash -function run_test() { +function run_test { local target=$1 local sketch=$2 local options=$3 local erase_flash=$4 - local sketchdir=$(dirname $sketch) - local sketchname=$(basename $sketchdir) + local sketchdir + local sketchname local result=0 local error=0 local sdkconfig_path + local extra_args - if [ $options -eq 0 ] && [ -f $sketchdir/ci.json ]; then - len=`jq -r --arg target $target '.fqbn[$target] | length' $sketchdir/ci.json` - if [ $len -eq 0 ]; then + sketchdir=$(dirname "$sketch") + sketchname=$(basename "$sketchdir") + + if [ "$options" -eq 0 ] && [ -f "$sketchdir"/ci.json ]; then + len=$(jq -r --arg target "$target" '.fqbn[$target] | length' "$sketchdir"/ci.json) + if [ "$len" -eq 0 ]; then len=1 fi else len=1 fi - if [ $len -eq 1 ]; then + if [ "$len" -eq 1 ]; then sdkconfig_path="$HOME/.arduino/tests/$sketchname/build.tmp/sdkconfig" else sdkconfig_path="$HOME/.arduino/tests/$sketchname/build0.tmp/sdkconfig" fi - if [ -f $sketchdir/ci.json ]; then + if [ -f "$sketchdir"/ci.json ]; then # If the target or platform is listed as false, skip the sketch. Otherwise, include it. - is_target=$(jq -r --arg target $target '.targets[$target]' $sketchdir/ci.json) - selected_platform=$(jq -r --arg platform $platform '.platforms[$platform]' $sketchdir/ci.json) + is_target=$(jq -r --arg target "$target" '.targets[$target]' "$sketchdir"/ci.json) + selected_platform=$(jq -r --arg platform "$platform" '.platforms[$platform]' "$sketchdir"/ci.json) if [[ $is_target == "false" ]] || [[ $selected_platform == "false" ]]; then - printf "\033[93mSkipping $sketchname test for $target, platform: $platform\033[0m\n" + printf "\033[93mSkipping %s test for %s, platform: %s\033[0m\n" "$sketchname" "$target" "$platform" printf "\n\n\n" return 0 fi fi - if [ ! -f $sdkconfig_path ]; then - printf "\033[93mSketch $sketchname not built\nMight be due to missing target requirements or build failure\033[0m\n" + if [ ! 
-f "$sdkconfig_path" ]; then + printf "\033[93mSketch %s not built\nMight be due to missing target requirements or build failure\033[0m\n" "$sketchname" printf "\n\n\n" return 0 fi - local right_target=$(grep -E "^CONFIG_IDF_TARGET=\"$target\"$" "$sdkconfig_path") + local right_target + right_target=$(grep -E "^CONFIG_IDF_TARGET=\"$target\"$" "$sdkconfig_path") if [ -z "$right_target" ]; then - printf "\033[91mError: Sketch $sketchname compiled for different target\n\033[0m\n" + printf "\033[91mError: Sketch %s compiled for different target\n\033[0m\n" "$sketchname" printf "\n\n\n" return 1 fi - if [ $len -eq 1 ]; then - # build_dir="$sketchdir/build" - build_dir="$HOME/.arduino/tests/$sketchname/build.tmp" - report_file="$sketchdir/$target/$sketchname.xml" + if [ "$len" -eq 1 ]; then + # build_dir="$sketchdir/build" + build_dir="$HOME/.arduino/tests/$sketchname/build.tmp" + report_file="$sketchdir/$target/$sketchname.xml" fi - for i in `seq 0 $(($len - 1))` - do + for i in $(seq 0 $((len - 1))); do fqbn="Default" - if [ $len -ne 1 ]; then - fqbn=`jq -r --arg target $target --argjson i $i '.fqbn[$target] | sort | .[$i]' $sketchdir/ci.json` - elif [ -f $sketchdir/ci.json ]; then - has_fqbn=`jq -r --arg target $target '.fqbn[$target]' $sketchdir/ci.json` + if [ "$len" -ne 1 ]; then + fqbn=$(jq -r --arg target "$target" --argjson i "$i" '.fqbn[$target] | sort | .[$i]' "$sketchdir"/ci.json) + elif [ -f "$sketchdir"/ci.json ]; then + has_fqbn=$(jq -r --arg target "$target" '.fqbn[$target]' "$sketchdir"/ci.json) if [ "$has_fqbn" != "null" ]; then - fqbn=`jq -r --arg target $target '.fqbn[$target] | .[0]' $sketchdir/ci.json` + fqbn=$(jq -r --arg target "$target" '.fqbn[$target] | .[0]' "$sketchdir"/ci.json) fi fi - printf "\033[95mRunning test: $sketchname -- Config: $fqbn\033[0m\n" - if [ $erase_flash -eq 1 ]; then - esptool.py -c $target erase_flash + printf "\033[95mRunning test: %s -- Config: %s\033[0m\n" "$sketchname" "$fqbn" + if [ "$erase_flash" -eq 1 ]; then + esptool.py -c "$target" erase_flash fi - if [ $len -ne 1 ]; then + if [ "$len" -ne 1 ]; then # build_dir="$sketchdir/build$i" build_dir="$HOME/.arduino/tests/$sketchname/build$i.tmp" report_file="$sketchdir/$target/$sketchname$i.xml" fi if [ $platform == "wokwi" ]; then - extra_args="--target $target --embedded-services arduino,wokwi --wokwi-timeout=$wokwi_timeout" + extra_args=("--target" "$target" "--embedded-services" "arduino,wokwi" "--wokwi-timeout=$wokwi_timeout") if [[ -f "$sketchdir/scenario.yaml" ]]; then - extra_args+=" --wokwi-scenario $sketchdir/scenario.yaml" + extra_args+=("--wokwi-scenario" "$sketchdir/scenario.yaml") fi if [[ -f "$sketchdir/diagram.$target.json" ]]; then - extra_args+=" --wokwi-diagram $sketchdir/diagram.$target.json" + extra_args+=("--wokwi-diagram" "$sketchdir/diagram.$target.json") fi elif [ $platform == "qemu" ]; then PATH=$HOME/qemu/bin:$PATH - extra_args="--embedded-services qemu --qemu-image-path $build_dir/$sketchname.ino.merged.bin" + extra_args=("--embedded-services" "qemu" "--qemu-image-path" "$build_dir/$sketchname.ino.merged.bin") - if [ $target == "esp32" ] || [ $target == "esp32s3" ]; then - extra_args+=" --qemu-prog-path qemu-system-xtensa --qemu-cli-args=\"-machine $target -m 4M -nographic\"" - elif [ $target == "esp32c3" ]; then - extra_args+=" --qemu-prog-path qemu-system-riscv32 --qemu-cli-args=\"-machine $target -icount 3 -nographic\"" + if [ "$target" == "esp32" ] || [ "$target" == "esp32s3" ]; then + extra_args+=("--qemu-prog-path" "qemu-system-xtensa" 
"--qemu-cli-args=\"-machine $target -m 4M -nographic\"") + elif [ "$target" == "esp32c3" ]; then + extra_args+=("--qemu-prog-path" "qemu-system-riscv32" "--qemu-cli-args=\"-machine $target -icount 3 -nographic\"") else - printf "\033[91mUnsupported QEMU target: $target\033[0m\n" + printf "\033[91mUnsupported QEMU target: %s\033[0m\n" "$target" exit 1 fi else - extra_args="--embedded-services esp,arduino" + extra_args=("--embedded-services" "esp,arduino") fi - rm $sketchdir/diagram.json 2>/dev/null || true + rm "$sketchdir"/diagram.json 2>/dev/null || true result=0 - printf "\033[95mpytest $sketchdir/test_$sketchname.py --build-dir $build_dir --junit-xml=$report_file $extra_args\033[0m\n" - bash -c "set +e; pytest $sketchdir/test_$sketchname.py --build-dir $build_dir --junit-xml=$report_file $extra_args; exit \$?" || result=$? + printf "\033[95mpytest \"%s/test_%s.py\" --build-dir \"%s\" --junit-xml=\"%s\" %s\033[0m\n" "$sketchdir" "$sketchname" "$build_dir" "$report_file" "${extra_args[*]@Q}" + bash -c "set +e; pytest \"$sketchdir/test_$sketchname.py\" --build-dir \"$build_dir\" --junit-xml=\"$report_file\" ${extra_args[*]@Q}; exit \$?" || result=$? printf "\n" if [ $result -ne 0 ]; then result=0 - printf "\033[95mRetrying test: $sketchname -- Config: $i\033[0m\n" - printf "\033[95mpytest $sketchdir/test_$sketchname.py --build-dir $build_dir --junit-xml=$report_file $extra_args\033[0m\n" - bash -c "set +e; pytest $sketchdir/test_$sketchname.py --build-dir $build_dir --junit-xml=$report_file $extra_args; exit \$?" || result=$? + printf "\033[95mRetrying test: %s -- Config: %s\033[0m\n" "$sketchname" "$i" + printf "\033[95mpytest \"%s/test_%s.py\" --build-dir \"%s\" --junit-xml=\"%s\" %s\033[0m\n" "$sketchdir" "$sketchname" "$build_dir" "$report_file" "${extra_args[*]@Q}" + bash -c "set +e; pytest \"$sketchdir/test_$sketchname.py\" --build-dir \"$build_dir\" --junit-xml=\"$report_file\" ${extra_args[*]@Q}; exit \$?" || result=$? printf "\n" if [ $result -ne 0 ]; then - printf "\033[91mFailed test: $sketchname -- Config: $i\033[0m\n\n" - error=$result + printf "\033[91mFailed test: %s -- Config: %s\033[0m\n\n" "$sketchname" "$i" + error=$result fi fi done @@ -136,13 +140,13 @@ chunk_run=0 options=0 erase=0 -while [ ! -z "$1" ]; do +while [ -n "$1" ]; do case $1 in -c ) chunk_run=1 ;; -Q ) - if [ ! -d $QEMU_PATH ]; then + if [ ! -d "$QEMU_PATH" ]; then echo "QEMU path $QEMU_PATH does not exist" exit 1 fi @@ -188,98 +192,97 @@ while [ ! -z "$1" ]; do test_type=$1 ;; * ) - break - ;; + break + ;; esac shift done if [ ! 
$platform == "qemu" ]; then - source ${SCRIPTS_DIR}/install-arduino-ide.sh + source "${SCRIPTS_DIR}/install-arduino-ide.sh" fi # If sketch is provided and test type is not, test type is inferred from the sketch path if [[ $test_type == "all" ]] || [[ -z $test_type ]]; then if [ -n "$sketch" ]; then - tmp_sketch_path=$(find tests -name $sketch.ino) - test_type=$(basename $(dirname $(dirname "$tmp_sketch_path"))) + tmp_sketch_path=$(find tests -name "$sketch".ino) + test_type=$(basename "$(dirname "$(dirname "$tmp_sketch_path")")") echo "Sketch $sketch test type: $test_type" test_folder="$PWD/tests/$test_type" else - test_folder="$PWD/tests" + test_folder="$PWD/tests" fi else test_folder="$PWD/tests/$test_type" fi if [ $chunk_run -eq 0 ]; then - if [ -z $sketch ]; then + if [ -z "$sketch" ]; then echo "ERROR: Sketch name is required for single test run" exit 1 fi - run_test $target $test_folder/$sketch/$sketch.ino $options $erase + run_test "$target" "$test_folder"/"$sketch"/"$sketch".ino $options $erase exit $? else - if [ "$chunk_max" -le 0 ]; then - echo "ERROR: Chunks count must be positive number" - exit 1 - fi - - if [ "$chunk_index" -ge "$chunk_max" ] && [ "$chunk_max" -ge 2 ]; then - echo "ERROR: Chunk index must be less than chunks count" - exit 1 - fi - - set +e - # Ignore requirements as we don't have the libs. The requirements will be checked in the run_test function - ${COUNT_SKETCHES} "$test_folder" "$target" "1" - sketchcount=$? - set -e - sketches=$(cat sketches.txt) - rm -rf sketches.txt - - chunk_size=$(( $sketchcount / $chunk_max )) - all_chunks=$(( $chunk_max * $chunk_size )) - if [ "$all_chunks" -lt "$sketchcount" ]; then - chunk_size=$(( $chunk_size + 1 )) - fi - - start_index=0 - end_index=0 - if [ "$chunk_index" -ge "$chunk_max" ]; then - start_index=$chunk_index - end_index=$sketchcount - else - start_index=$(( $chunk_index * $chunk_size )) - if [ "$sketchcount" -le "$start_index" ]; then - exit 0 - fi - - end_index=$(( $(( $chunk_index + 1 )) * $chunk_size )) - if [ "$end_index" -gt "$sketchcount" ]; then - end_index=$sketchcount - fi - fi - - start_num=$(( $start_index + 1 )) - sketchnum=0 - error=0 - - for sketch in $sketches; do - - sketchnum=$(($sketchnum + 1)) - if [ "$sketchnum" -le "$start_index" ] \ - || [ "$sketchnum" -gt "$end_index" ]; then - continue - fi - - printf "\033[95mSketch Index $(($sketchnum - 1))\033[0m\n" - - exit_code=0 - run_test $target $sketch $options $erase || exit_code=$? - if [ $exit_code -ne 0 ]; then - error=$exit_code - fi - done - exit $error + if [ "$chunk_max" -le 0 ]; then + echo "ERROR: Chunks count must be positive number" + exit 1 + fi + + if [ "$chunk_index" -ge "$chunk_max" ] && [ "$chunk_max" -ge 2 ]; then + echo "ERROR: Chunk index must be less than chunks count" + exit 1 + fi + + set +e + # Ignore requirements as we don't have the libs. The requirements will be checked in the run_test function + ${COUNT_SKETCHES} "$test_folder" "$target" "1" + sketchcount=$? 
+ set -e + sketches=$(cat sketches.txt) + rm -rf sketches.txt + + chunk_size=$(( sketchcount / chunk_max )) + all_chunks=$(( chunk_max * chunk_size )) + if [ "$all_chunks" -lt "$sketchcount" ]; then + chunk_size=$(( chunk_size + 1 )) + fi + + start_index=0 + end_index=0 + if [ "$chunk_index" -ge "$chunk_max" ]; then + start_index=$chunk_index + end_index=$sketchcount + else + start_index=$(( chunk_index * chunk_size )) + if [ "$sketchcount" -le "$start_index" ]; then + exit 0 + fi + + end_index=$(( $(( chunk_index + 1 )) * chunk_size )) + if [ "$end_index" -gt "$sketchcount" ]; then + end_index=$sketchcount + fi + fi + + sketchnum=0 + error=0 + + for sketch in $sketches; do + + sketchnum=$((sketchnum + 1)) + if [ "$sketchnum" -le "$start_index" ] \ + || [ "$sketchnum" -gt "$end_index" ]; then + continue + fi + + printf "\033[95mSketch Index %s\033[0m\n" "$((sketchnum - 1))" + + exit_code=0 + run_test "$target" "$sketch" $options $erase || exit_code=$? + if [ $exit_code -ne 0 ]; then + error=$exit_code + fi + done + exit $error fi diff --git a/.github/scripts/update-version.sh b/.github/scripts/update-version.sh index cbc31378b15..9a38b27a57a 100755 --- a/.github/scripts/update-version.sh +++ b/.github/scripts/update-version.sh @@ -1,20 +1,21 @@ #!/bin/bash +# shellcheck disable=SC2002 # For reference: add tools for all boards by replacing one line in each board # "[board].upload.tool=esptool_py" to "[board].upload.tool=esptool_py\n[board].upload.tool.default=esptool_py\n[board].upload.tool.network=esp_ota" #cat boards.txt | sed "s/\([a-zA-Z0-9_\-]*\)\.upload\.tool\=esptool_py/\1\.upload\.tool\=esptool_py\\n\1\.upload\.tool\.default\=esptool_py\\n\1\.upload\.tool\.network\=esp_ota/" if [ ! $# -eq 3 ]; then - echo "Bad number of arguments: $#" >&2 - echo "usage: $0 <major> <minor> <patch>" >&2 - exit 1 + echo "Bad number of arguments: $#" >&2 + echo "usage: $0 <major> <minor> <patch>" >&2 + exit 1 fi re='^[0-9]+$' if [[ ! $1 =~ $re ]] || [[ ! $2 =~ $re ]] || [[ ! $3 =~ $re ]] ; then - echo "error: Not a valid version: $1.$2.$3" >&2 - echo "usage: $0 <major> <minor> <patch>" >&2 - exit 1 + echo "error: Not a valid version: $1.$2.$3" >&2 + echo "usage: $0 <major> <minor> <patch>" >&2 + exit 1 fi ESP_ARDUINO_VERSION_MAJOR="$1" @@ -36,11 +37,12 @@ sed "s/#define ESP_ARDUINO_VERSION_MAJOR.*/#define ESP_ARDUINO_VERSION_MAJOR $ES sed "s/#define ESP_ARDUINO_VERSION_MINOR.*/#define ESP_ARDUINO_VERSION_MINOR $ESP_ARDUINO_VERSION_MINOR/g" | \ sed "s/#define ESP_ARDUINO_VERSION_PATCH.*/#define ESP_ARDUINO_VERSION_PATCH $ESP_ARDUINO_VERSION_PATCH/g" > __esp_arduino_version.h && mv __esp_arduino_version.h cores/esp32/esp_arduino_version.h -for lib in `ls libraries`; do - if [ -f "libraries/$lib/library.properties" ]; then - echo "Updating Library $lib..." - cat "libraries/$lib/library.properties" | sed "s/version=.*/version=$ESP_ARDUINO_VERSION/g" > "libraries/$lib/__library.properties" && mv "libraries/$lib/__library.properties" "libraries/$lib/library.properties" - fi +libraries=$(find libraries -maxdepth 1 -mindepth 1 -type d -exec basename {} \;) +for lib in $libraries; do + if [ -f "libraries/$lib/library.properties" ]; then + echo "Updating Library $lib..." 
+ cat "libraries/$lib/library.properties" | sed "s/version=.*/version=$ESP_ARDUINO_VERSION/g" > "libraries/$lib/__library.properties" && mv "libraries/$lib/__library.properties" "libraries/$lib/library.properties" + fi done exit 0 diff --git a/.github/scripts/upload_py_tools.sh b/.github/scripts/upload_py_tools.sh index 0544bccd710..abe18a50c6e 100755 --- a/.github/scripts/upload_py_tools.sh +++ b/.github/scripts/upload_py_tools.sh @@ -1,11 +1,12 @@ #!/bin/bash + CHANGED_FILES=$1 echo "Pushing '$CHANGED_FILES' as github-actions[bot]" git config --global github.user "github-actions[bot]" git config --global user.name "github-actions[bot]" git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com" for tool in $CHANGED_FILES; do - git add tools/$tool.exe + git add tools/"$tool".exe done git commit -m "change(tools): Push generated binaries to PR" git push diff --git a/.github/workflows/allboards.yml b/.github/workflows/allboards.yml index 4a2d4349ac3..8c4dadcd03e 100644 --- a/.github/workflows/allboards.yml +++ b/.github/workflows/allboards.yml @@ -1,6 +1,6 @@ name: Boards Test - Remote trigger -# The workflow will run on remote dispath with event-type set to "test-boards" +# The workflow will run on remote dispatch with event-type set to "test-boards" on: repository_dispatch: types: [test-boards] @@ -20,8 +20,7 @@ jobs: ref: ${{ github.event.client_payload.branch }} - name: Get boards fqbns - run: - bash .github/scripts/find_all_boards.sh + run: bash .github/scripts/find_all_boards.sh setup-chunks: needs: find-boards @@ -43,8 +42,7 @@ jobs: - id: set-test-chunks name: Set Chunks - run: - echo "test-chunks<> $GITHUB_OUTPUT + run: echo "test-chunks<> $GITHUB_OUTPUT echo "$( jq -nc '${{ needs.find-boards.outputs.fqbns }} | [_nwise( ${{ needs.find-boards.outputs.board-count }}/15 | ceil)]')" >> $GITHUB_OUTPUT @@ -61,7 +59,7 @@ jobs: strategy: fail-fast: false - matrix: + matrix: chunk: ${{ fromJSON(needs.setup-chunks.outputs['test-chunks']) }} steps: @@ -71,9 +69,8 @@ jobs: ref: ${{ github.event.client_payload.branch }} - name: Echo FQBNS to file - run: - echo "$FQBN" > fqbns.json - env: + run: echo "$FQBN" > fqbns.json + env: FQBN: ${{ toJSON(matrix.chunk) }} - name: Compile sketch @@ -88,5 +85,4 @@ jobs: enable-warnings-report: false cli-compile-flags: | - --warnings="all" - sketch-paths: - "- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino" + sketch-paths: "- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino" diff --git a/.github/workflows/boards.yml b/.github/workflows/boards.yml index a309e4ed2ce..a51c794cfb4 100644 --- a/.github/workflows/boards.yml +++ b/.github/workflows/boards.yml @@ -4,9 +4,9 @@ name: Boards Test on: pull_request: paths: - - 'boards.txt' - - 'libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino' - - '.github/workflows/boards.yml' + - "boards.txt" + - "libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino" + - ".github/workflows/boards.yml" env: # It's convenient to set variables for values used multiple times in the workflow @@ -28,8 +28,7 @@ jobs: uses: dcarbone/install-jq-action@v1.0.1 - name: Get board name - run: - bash .github/scripts/find_new_boards.sh ${{ github.repository }} ${{github.base_ref}} + run: bash .github/scripts/find_new_boards.sh ${{ github.repository }} ${{github.base_ref}} test-boards: needs: find-boards @@ -72,7 +71,7 @@ jobs: ./tools/openocd-esp32 ./tools/riscv32-* ./tools/xtensa-* - + - name: Compile sketch uses: P-R-O-C-H-Y/compile-sketches@main with: @@ -85,6 +84,5 @@ jobs: 
cli-compile-flags: | - --warnings="all" exit-on-fail: true - sketch-paths: - "- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino" + sketch-paths: "- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino" verbose: true diff --git a/.github/workflows/build_py_tools.yml b/.github/workflows/build_py_tools.yml index 37742d15224..48e7f2c82d3 100644 --- a/.github/workflows/build_py_tools.yml +++ b/.github/workflows/build_py_tools.yml @@ -3,11 +3,11 @@ name: Build Python Tools on: pull_request: paths: - - '.github/workflows/build_py_tools.yml' - - 'tools/get.py' - - 'tools/espota.py' - - 'tools/gen_esp32part.py' - - 'tools/gen_insights_package.py' + - ".github/workflows/build_py_tools.yml" + - "tools/get.py" + - "tools/espota.py" + - "tools/gen_esp32part.py" + - "tools/gen_insights_package.py" jobs: find-changed-tools: @@ -33,8 +33,8 @@ jobs: uses: tj-actions/changed-files@v41 id: verify-changed-files with: - fetch_depth: '2' - since_last_remote_commit: 'true' + fetch_depth: "2" + since_last_remote_commit: "true" files: | tools/get.py tools/espota.py @@ -57,20 +57,20 @@ jobs: matrix: os: [windows-latest, macos-latest, ubuntu-20.04, ARM] include: - - os: windows-latest - TARGET: win64 - EXTEN: .exe - SEPARATOR: ';' - - os: macos-latest - TARGET: macos - SEPARATOR: ':' - - os: ubuntu-20.04 - TARGET: linux-amd64 - SEPARATOR: ':' - - os: ARM - CONTAINER: python:3.8-bullseye - TARGET: arm - SEPARATOR: ':' + - os: windows-latest + TARGET: win64 + EXTEN: .exe + SEPARATOR: ";" + - os: macos-latest + TARGET: macos + SEPARATOR: ":" + - os: ubuntu-20.04 + TARGET: linux-amd64 + SEPARATOR: ":" + - os: ARM + CONTAINER: python:3.8-bullseye + TARGET: arm + SEPARATOR: ":" container: ${{ matrix.CONTAINER }} # use python container on ARM env: DISTPATH: pytools-${{ matrix.TARGET }} diff --git a/.github/workflows/dangerjs.yml b/.github/workflows/dangerjs.yml index 75c046731f3..f4bdad3c16e 100644 --- a/.github/workflows/dangerjs.yml +++ b/.github/workflows/dangerjs.yml @@ -11,14 +11,14 @@ jobs: pull-request-style-linter: runs-on: ubuntu-latest steps: - - name: Check out PR head - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} + - name: Check out PR head + uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} - - name: DangerJS pull request linter - uses: espressif/shared-github-dangerjs@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - rule-max-commits: 'false' - commit-messages-min-summary-length: '10' + - name: DangerJS pull request linter + uses: espressif/shared-github-dangerjs@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + rule-max-commits: "false" + commit-messages-min-summary-length: "10" diff --git a/.github/workflows/docs_build.yml b/.github/workflows/docs_build.yml index ffa5a0b4580..d2f12e1f7b5 100644 --- a/.github/workflows/docs_build.yml +++ b/.github/workflows/docs_build.yml @@ -3,18 +3,17 @@ name: Documentation Build and Deploy CI on: push: branches: - - master - - release/v2.x + - master + - release/v2.x paths: - - 'docs/**' - - '.github/workflows/docs_build.yml' + - "docs/**" + - ".github/workflows/docs_build.yml" pull_request: paths: - - 'docs/**' - - '.github/workflows/docs_build.yml' + - "docs/**" + - ".github/workflows/docs_build.yml" jobs: - build-docs: name: Build ESP-Docs runs-on: ubuntu-22.04 @@ -22,25 +21,25 @@ jobs: run: shell: bash steps: - - uses: actions/checkout@v4 - with: - submodules: true - - uses: actions/setup-python@v5 - with: - cache-dependency-path: docs/requirements.txt - cache: 'pip' 
- python-version: '3.10' - - name: Build - run: | - sudo apt update - sudo apt install python3-pip python3-setuptools - # GitHub CI installs pip3 and setuptools outside the path. - # Update the path to include them and run. - cd ./docs - PATH=/home/runner/.local/bin:$PATH pip3 install -r requirements.txt --prefer-binary - PATH=/home/runner/.local/bin:$PATH SPHINXOPTS="-W" build-docs -l en - - name: Archive Docs - uses: actions/upload-artifact@v4 - with: - name: docs - path: docs + - uses: actions/checkout@v4 + with: + submodules: true + - uses: actions/setup-python@v5 + with: + cache-dependency-path: docs/requirements.txt + cache: "pip" + python-version: "3.10" + - name: Build + run: | + sudo apt update + sudo apt install python3-pip python3-setuptools + # GitHub CI installs pip3 and setuptools outside the path. + # Update the path to include them and run. + cd ./docs + PATH=/home/runner/.local/bin:$PATH pip3 install -r requirements.txt --prefer-binary + PATH=/home/runner/.local/bin:$PATH SPHINXOPTS="-W" build-docs -l en + - name: Archive Docs + uses: actions/upload-artifact@v4 + with: + name: docs + path: docs diff --git a/.github/workflows/docs_deploy.yml b/.github/workflows/docs_deploy.yml index 5393f7e8a20..b558fd21aa5 100644 --- a/.github/workflows/docs_deploy.yml +++ b/.github/workflows/docs_deploy.yml @@ -7,11 +7,11 @@ on: - completed push: branches: - - release/v2.x - - master + - release/v2.x + - master paths: - - 'docs/**' - - '.github/workflows/docs_deploy.yml' + - "docs/**" + - ".github/workflows/docs_deploy.yml" jobs: deploy-prod-docs: @@ -21,39 +21,39 @@ jobs: run: shell: bash steps: - - name: Check if release workflow is successful - if: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.conclusion != 'success' }} - run: | - echo "Release workflow failed. Exiting..." - exit 1 - - uses: actions/checkout@v4 - with: - submodules: true - - uses: actions/setup-python@v5 - with: - cache-dependency-path: docs/requirements.txt - cache: 'pip' - python-version: '3.10' - - name: Deploy Documentation - env: - # Deploy to production server - # DOCS_BUILD_DIR: "./docs/_build/" - DOCS_DEPLOY_PRIVATEKEY: ${{ secrets.DOCS_KEY }} - DOCS_DEPLOY_PATH: ${{ secrets.DOCS_PATH }} - DOCS_DEPLOY_SERVER: ${{ secrets.DOCS_SERVER }} - DOCS_DEPLOY_SERVER_USER: ${{ secrets.DOCS_USER }} - DOCS_DEPLOY_URL_BASE: ${{ secrets.DOCS_URL }} - run: | - sudo apt update - sudo apt install python3-pip python3-setuptools - source ./docs/utils.sh - add_doc_server_ssh_keys $DOCS_DEPLOY_PRIVATEKEY $DOCS_DEPLOY_SERVER $DOCS_DEPLOY_SERVER_USER - export GIT_VER=$(git describe --always) - echo "PIP install requirements..." - pip3 install --user -r ./docs/requirements.txt - echo "Building the Docs..." - cd ./docs && build-docs -l en - echo "Deploy the Docs..." - export DOCS_BUILD_DIR=$GITHUB_WORKSPACE/docs/ - cd $GITHUB_WORKSPACE/docs - deploy-docs + - name: Check if release workflow is successful + if: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.conclusion != 'success' }} + run: | + echo "Release workflow failed. Exiting..." 
+ exit 1 + - uses: actions/checkout@v4 + with: + submodules: true + - uses: actions/setup-python@v5 + with: + cache-dependency-path: docs/requirements.txt + cache: "pip" + python-version: "3.10" + - name: Deploy Documentation + env: + # Deploy to production server + # DOCS_BUILD_DIR: "./docs/_build/" + DOCS_DEPLOY_PRIVATEKEY: ${{ secrets.DOCS_KEY }} + DOCS_DEPLOY_PATH: ${{ secrets.DOCS_PATH }} + DOCS_DEPLOY_SERVER: ${{ secrets.DOCS_SERVER }} + DOCS_DEPLOY_SERVER_USER: ${{ secrets.DOCS_USER }} + DOCS_DEPLOY_URL_BASE: ${{ secrets.DOCS_URL }} + run: | + sudo apt update + sudo apt install python3-pip python3-setuptools + source ./docs/utils.sh + add_doc_server_ssh_keys $DOCS_DEPLOY_PRIVATEKEY $DOCS_DEPLOY_SERVER $DOCS_DEPLOY_SERVER_USER + export GIT_VER=$(git describe --always) + echo "PIP install requirements..." + pip3 install --user -r ./docs/requirements.txt + echo "Building the Docs..." + cd ./docs && build-docs -l en + echo "Deploy the Docs..." + export DOCS_BUILD_DIR=$GITHUB_WORKSPACE/docs/ + cd $GITHUB_WORKSPACE/docs + deploy-docs diff --git a/.github/workflows/gh-pages.yml b/.github/workflows/gh-pages.yml index 5d8e1794a8a..47480e8239a 100644 --- a/.github/workflows/gh-pages.yml +++ b/.github/workflows/gh-pages.yml @@ -3,21 +3,20 @@ name: GitHub Pages CI on: push: branches: - - master - - pages + - master + - pages paths: - - 'README.md' - - '.github/scripts/on-pages.sh' - - '.github/workflows/gh-pages.yml' + - "README.md" + - ".github/scripts/on-pages.sh" + - ".github/workflows/gh-pages.yml" jobs: - build-pages: name: Build GitHub Pages runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - name: Copy Files - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: bash ./.github/scripts/on-pages.sh + - uses: actions/checkout@v4 + - name: Copy Files + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: bash ./.github/scripts/on-pages.sh diff --git a/.github/workflows/lib.yml b/.github/workflows/lib.yml index 7a7b863095d..1197308c921 100644 --- a/.github/workflows/lib.yml +++ b/.github/workflows/lib.yml @@ -7,7 +7,7 @@ on: # Schedule weekly builds on every Sunday at 4 am schedule: - - cron: '0 4 * * SUN' + - cron: "0 4 * * SUN" concurrency: group: libs-${{ github.event.pull_request.number || github.ref }} @@ -27,7 +27,6 @@ jobs: contains(github.event.pull_request.labels.*.name, 'lib_test') || (github.event_name == 'schedule' && github.repository == 'espressif/arduino-esp32') runs-on: ubuntu-latest - env: REPOSITORY: | - source-path: '.' 
@@ -60,7 +59,6 @@ jobs: - target: esp32p4 fqbn: espressif:esp32:esp32p4 - steps: # This step makes the contents of the repository available to the workflow - name: Checkout repository @@ -88,7 +86,7 @@ jobs: path: ${{ env.SKETCHES_REPORTS_PATH }} report-to-file: - needs: compile-sketch # Wait for the compile job to finish to get the data for the report + needs: compile-sketch # Wait for the compile job to finish to get the data for the report if: github.event_name == 'schedule' # Only run the job when the workflow is triggered by a schedule runs-on: ubuntu-latest steps: @@ -97,11 +95,10 @@ jobs: uses: actions/checkout@v4 with: token: ${{ env.GITHUB_TOKEN }} - fetch-depth: '0' + fetch-depth: "0" - name: Switch branch - run: - git checkout remotes/origin/gh-pages + run: git checkout remotes/origin/gh-pages # This step is needed to get the size data produced by the compile jobs - name: Download sketches reports artifact @@ -118,8 +115,7 @@ jobs: destination-file: ${{ env.RESULT_LIBRARY_TEST_FILE }} - name: Append file with action URL - run: - echo "/ [GitHub Action Link](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" >> ${{ env.RESULT_LIBRARY_TEST_FILE }} + run: echo "/ [GitHub Action Link](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" >> ${{ env.RESULT_LIBRARY_TEST_FILE }} - name: Push to github repo run: | diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index 8257e78c822..dc009e445da 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -37,7 +37,7 @@ jobs: uses: actions/setup-python@v5 with: cache-dependency-path: tools/pre-commit/requirements.txt - cache: 'pip' + cache: "pip" python-version: "3.x" - name: Get Python version hash diff --git a/.github/workflows/publishlib.yml b/.github/workflows/publishlib.yml index 7fd932f5309..62393b80915 100644 --- a/.github/workflows/publishlib.yml +++ b/.github/workflows/publishlib.yml @@ -47,7 +47,7 @@ jobs: uses: juliangruber/read-file-action@v1 with: path: ./artifacts/workflows/pr_num.txt - + - name: Report results uses: P-R-O-C-H-Y/report-size-deltas@libs with: diff --git a/.github/workflows/publishsizes-2.x.yml b/.github/workflows/publishsizes-2.x.yml index bdd2fc311e4..ffbd751838c 100644 --- a/.github/workflows/publishsizes-2.x.yml +++ b/.github/workflows/publishsizes-2.x.yml @@ -11,11 +11,11 @@ env: jobs: sizes-test-results: - name: Sizes Comparsion Results + name: Sizes Comparison Results runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@v4 # This step checks out the repository's code at gh-pages branch + uses: actions/checkout@v4 # This step checks out the repository's code at gh-pages branch with: ref: gh-pages @@ -41,8 +41,7 @@ jobs: destination-file: ${{ env.RESULT_SIZES_TEST_FILE }} - name: Append file with action URL - run: - echo "/ [GitHub Action Link](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" >> ${{ env.RESULT_SIZES_TEST_FILE }} + run: echo "/ [GitHub Action Link](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" >> ${{ env.RESULT_SIZES_TEST_FILE }} - name: Push to github repo run: | diff --git a/.github/workflows/publishsizes.yml b/.github/workflows/publishsizes.yml index ce4e94cd5e5..6c6d75eccce 100644 --- a/.github/workflows/publishsizes.yml +++ b/.github/workflows/publishsizes.yml @@ -14,7 +14,7 @@ env: jobs: sizes-test-results: - name: Sizes Comparsion Results + name: Sizes Comparison 
Results runs-on: ubuntu-latest if: | github.event.workflow_run.event == 'pull_request' && @@ -22,7 +22,7 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v4 # This step checks out the repository's code at gh-pages branch + uses: actions/checkout@v4 # This step checks out the repository's code at gh-pages branch with: ref: gh-pages diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index beee735c368..4f30bdbb844 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -4,30 +4,30 @@ on: workflow_dispatch: push: branches: - - master - - release/* + - master + - release/* pull_request: paths: - - 'cores/**' - - 'libraries/**' - - '!libraries/**.md' - - '!libraries/**.txt' - - '!libraries/**.properties' - - '!libraries/**.py' - - 'package/**' - - 'tools/**.py' - - 'platform.txt' - - 'programmers.txt' - - 'idf_component.yml' - - 'Kconfig.projbuild' - - 'package.json' - - 'CMakeLists.txt' - - '.github/workflows/push.yml' - - '.github/scripts/**' - - '!.github/scripts/find_*' - - '!.github/scripts/on-release.sh' - - '!.github/scripts/tests_*' - - '!.github/scripts/upload_*' + - "cores/**" + - "libraries/**" + - "!libraries/**.md" + - "!libraries/**.txt" + - "!libraries/**.properties" + - "!libraries/**.py" + - "package/**" + - "tools/**.py" + - "platform.txt" + - "programmers.txt" + - "idf_component.yml" + - "Kconfig.projbuild" + - "package.json" + - "CMakeLists.txt" + - ".github/workflows/push.yml" + - ".github/scripts/**" + - "!.github/scripts/find_*" + - "!.github/scripts/on-release.sh" + - "!.github/scripts/tests_*" + - "!.github/scripts/upload_*" - "variants/esp32/**/*" - "variants/esp32s2/**/*" - "variants/esp32s3/**/*" @@ -49,8 +49,8 @@ jobs: runs-on: ubuntu-latest if: ${{ !(github.event_name == 'pull_request' && startsWith(github.head_ref, 'release/')) }} steps: - - uses: actions/checkout@v4 - - run: bash ./.github/scripts/check-cmakelists.sh + - uses: actions/checkout@v4 + - run: bash ./.github/scripts/check-cmakelists.sh gen-chunks: name: Generate chunks @@ -65,16 +65,16 @@ jobs: chunk_count: ${{ steps.set-chunks.outputs.chunk_count }} chunks: ${{ steps.set-chunks.outputs.chunks }} steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 2 + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 2 - - name: Get changed files - id: changed-files - uses: tj-actions/changed-files@v44 - with: - files_yaml: | + - name: Get changed files + id: changed-files + uses: tj-actions/changed-files@v44 + with: + files_yaml: | core: - '.github/**' - '!.github/scripts/install-platformio-esp32.sh' @@ -115,31 +115,31 @@ jobs: - '.github/scripts/install-platformio-esp32.sh' - 'tools/platformio-build.py' - - name: Set chunks - id: set-chunks - env: - LIB_FILES: ${{ steps.changed-files.outputs.libraries_all_changed_files }} - IS_PR: ${{ github.event_name == 'pull_request' }} - MAX_CHUNKS: ${{ env.MAX_CHUNKS }} - BUILD_PLATFORMIO: ${{ steps.changed-files.outputs.platformio_any_changed == 'true' }} - BUILD_IDF: ${{ steps.changed-files.outputs.idf_any_changed == 'true' }} - BUILD_LIBRARIES: ${{ steps.changed-files.outputs.libraries_any_changed == 'true' }} - BUILD_STATIC_SKETCHES: ${{ steps.changed-files.outputs.static_sketeches_any_changed == 'true' }} - FS_CHANGED: ${{ steps.changed-files.outputs.fs_any_changed == 'true' }} - NETWORKING_CHANGED: ${{ steps.changed-files.outputs.networking_any_changed == 'true' }} - CORE_CHANGED: ${{ steps.changed-files.outputs.core_any_changed == 'true' }} - LIB_CHANGED: 
${{ steps.changed-files.outputs.libraries_any_changed == 'true' }} - run: | - bash ./.github/scripts/set_push_chunks.sh + - name: Set chunks + id: set-chunks + env: + LIB_FILES: ${{ steps.changed-files.outputs.libraries_all_changed_files }} + IS_PR: ${{ github.event_name == 'pull_request' }} + MAX_CHUNKS: ${{ env.MAX_CHUNKS }} + BUILD_PLATFORMIO: ${{ steps.changed-files.outputs.platformio_any_changed == 'true' }} + BUILD_IDF: ${{ steps.changed-files.outputs.idf_any_changed == 'true' }} + BUILD_LIBRARIES: ${{ steps.changed-files.outputs.libraries_any_changed == 'true' }} + BUILD_STATIC_SKETCHES: ${{ steps.changed-files.outputs.static_sketeches_any_changed == 'true' }} + FS_CHANGED: ${{ steps.changed-files.outputs.fs_any_changed == 'true' }} + NETWORKING_CHANGED: ${{ steps.changed-files.outputs.networking_any_changed == 'true' }} + CORE_CHANGED: ${{ steps.changed-files.outputs.core_any_changed == 'true' }} + LIB_CHANGED: ${{ steps.changed-files.outputs.libraries_any_changed == 'true' }} + run: | + bash ./.github/scripts/set_push_chunks.sh - - name: Upload sketches found - if: ${{ steps.set-chunks.outputs.build_all == 'false' && steps.set-chunks.outputs.build_libraries == 'true' }} - uses: actions/upload-artifact@v4 - with: - name: sketches_found - path: sketches_found.txt - overwrite: true - if-no-files-found: error + - name: Upload sketches found + if: ${{ steps.set-chunks.outputs.build_all == 'false' && steps.set-chunks.outputs.build_libraries == 'true' }} + uses: actions/upload-artifact@v4 + with: + name: sketches_found + path: sketches_found.txt + overwrite: true + if-no-files-found: error # Ubuntu build-arduino-linux: @@ -153,45 +153,45 @@ jobs: chunk: ${{ fromJson(needs.gen-chunks.outputs.chunks) }} steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 - with: - python-version: '3.x' + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: "3.x" - - name: Get libs cache - uses: actions/cache@v4 - with: - key: libs-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('package/package_esp32_index.template.json', 'tools/get.py') }} - path: | - ./tools/dist - ./tools/esp32-arduino-libs - ./tools/esptool - ./tools/mk* - ./tools/openocd-esp32 - ./tools/riscv32-* - ./tools/xtensa-* + - name: Get libs cache + uses: actions/cache@v4 + with: + key: libs-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('package/package_esp32_index.template.json', 'tools/get.py') }} + path: | + ./tools/dist + ./tools/esp32-arduino-libs + ./tools/esptool + ./tools/mk* + ./tools/openocd-esp32 + ./tools/riscv32-* + ./tools/xtensa-* - - name: Build all sketches - if: ${{ needs.gen-chunks.outputs.build_all == 'true' }} - run: bash ./.github/scripts/on-push.sh ${{ matrix.chunk }} ${{ env.MAX_CHUNKS }} 1 + - name: Build all sketches + if: ${{ needs.gen-chunks.outputs.build_all == 'true' }} + run: bash ./.github/scripts/on-push.sh ${{ matrix.chunk }} ${{ env.MAX_CHUNKS }} 1 - - name: Download sketches found - if: ${{ needs.gen-chunks.outputs.build_all == 'false' && needs.gen-chunks.outputs.build_libraries == 'true' }} - uses: actions/download-artifact@v4 - with: - name: sketches_found + - name: Download sketches found + if: ${{ needs.gen-chunks.outputs.build_all == 'false' && needs.gen-chunks.outputs.build_libraries == 'true' }} + uses: actions/download-artifact@v4 + with: + name: sketches_found - - name: Build selected sketches - if: ${{ needs.gen-chunks.outputs.build_all == 'false' && needs.gen-chunks.outputs.build_libraries == 'true' }} - run: bash 
./.github/scripts/on-push.sh ${{ matrix.chunk }} ${{ needs.gen-chunks.outputs.chunk_count }} 1 sketches_found.txt + - name: Build selected sketches + if: ${{ needs.gen-chunks.outputs.build_all == 'false' && needs.gen-chunks.outputs.build_libraries == 'true' }} + run: bash ./.github/scripts/on-push.sh ${{ matrix.chunk }} ${{ needs.gen-chunks.outputs.chunk_count }} 1 sketches_found.txt - #Upload cli compile json as artifact - - name: Upload cli compile json - uses: actions/upload-artifact@v4 - with: - name: pr_cli_compile_${{ matrix.chunk }} - path: cli_compile_${{ matrix.chunk }}.json - overwrite: true + #Upload cli compile json as artifact + - name: Upload cli compile json + uses: actions/upload-artifact@v4 + with: + name: pr_cli_compile_${{ matrix.chunk }} + path: cli_compile_${{ matrix.chunk }}.json + overwrite: true # Windows and MacOS build-arduino-win-mac: @@ -205,12 +205,12 @@ jobs: os: [windows-latest, macOS-latest] steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 - with: - python-version: '3.x' - - name: Build Sketches - run: bash ./.github/scripts/on-push.sh + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: "3.x" + - name: Build Sketches + run: bash ./.github/scripts/on-push.sh # # PlatformIO on Windows, Ubuntu and Mac # build-platformio: @@ -225,14 +225,13 @@ jobs: # fail-fast: false # matrix: # os: [ubuntu-latest, windows-latest, macOS-latest] - # steps: - # - uses: actions/checkout@v4 - # - uses: actions/setup-python@v5 - # with: - # python-version: '3.x' - # - name: Build Sketches - # run: bash ./.github/scripts/on-push.sh 1 1 #equal and non-zero to trigger PIO + # - uses: actions/checkout@v4 + # - uses: actions/setup-python@v5 + # with: + # python-version: "3.x" + # - name: Build Sketches + # run: bash ./.github/scripts/on-push.sh 1 1 #equal and non-zero to trigger PIO # ESP-IDF component build build-esp-idf-component: @@ -251,7 +250,17 @@ jobs: # https://docs.espressif.com/projects/esp-idf/en/latest/esp32/api-guides/tools/idf-docker-image.html # for details. 
idf_ver: ["release-v5.3"] - idf_target: ["esp32", "esp32s2", "esp32s3", "esp32c2", "esp32c3", "esp32c6", "esp32h2", "esp32p4"] + idf_target: + [ + "esp32", + "esp32s2", + "esp32s3", + "esp32c2", + "esp32c3", + "esp32c6", + "esp32h2", + "esp32p4" + ] container: espressif/idf:${{ matrix.idf_ver }} steps: - name: Check out arduino-esp32 as a component @@ -276,16 +285,15 @@ jobs: if: github.event_name == 'push' && github.ref == 'refs/heads/master' runs-on: ubuntu-latest steps: - # Check out repository + # Check out repository - name: Checkout repository uses: actions/checkout@v4 with: token: ${{secrets.GITHUB_TOKEN}} - fetch-depth: '0' + fetch-depth: "0" - name: Switch branch - run: - git checkout remotes/origin/gh-pages + run: git checkout remotes/origin/gh-pages - name: Download sketches reports artifact uses: actions/download-artifact@v4 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f8aa779d994..53a512dd54f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -10,15 +10,15 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - uses: actions/setup-python@v5 - with: - python-version: '3.x' - - run: pip install packaging - - run: pip install pyserial - - name: Build Release - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: bash ./.github/scripts/on-release.sh + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: actions/setup-python@v5 + with: + python-version: "3.x" + - run: pip install packaging + - run: pip install pyserial + - name: Build Release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: bash ./.github/scripts/on-release.sh diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index f57a1925c1c..0db3b98782b 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -16,22 +16,22 @@ on: pull_request: types: [opened, reopened, closed, synchronize, labeled, unlabeled] paths: - - '.github/workflows/tests*' - - '.github/scripts/*.sh' - - '!.github/scripts/check-cmakelists.sh' - - '!.github/scripts/find_*' - - '!.github/scripts/on-*.sh' - - '!.github/scripts/set_push_chunks.sh' - - '!.github/scripts/update-version.sh' - - '!.github/scripts/upload_py_tools.sh' - - 'tests/**' - - 'cores/**' - - 'libraries/*/src/**.cpp' - - 'libraries/*/src/**.h' - - 'libraries/*/src/**.c' - - 'package/**' + - ".github/workflows/tests*" + - ".github/scripts/*.sh" + - "!.github/scripts/check-cmakelists.sh" + - "!.github/scripts/find_*" + - "!.github/scripts/on-*.sh" + - "!.github/scripts/set_push_chunks.sh" + - "!.github/scripts/update-version.sh" + - "!.github/scripts/upload_py_tools.sh" + - "tests/**" + - "cores/**" + - "libraries/*/src/**.cpp" + - "libraries/*/src/**.h" + - "libraries/*/src/**.c" + - "package/**" schedule: - - cron: '0 2 * * *' + - cron: "0 2 * * *" concurrency: group: tests-${{ github.event.pull_request.number || github.ref }} @@ -115,7 +115,7 @@ jobs: fail-fast: false matrix: type: ${{ fromJson(needs.gen-matrix.outputs.qemu-types) }} - chip: ['esp32', 'esp32c3'] + chip: ["esp32", "esp32c3"] with: type: ${{ matrix.type }} chip: ${{ matrix.chip }} diff --git a/.github/workflows/tests_build.yml b/.github/workflows/tests_build.yml index 090dfa8136b..7a5a2959657 100644 --- a/.github/workflows/tests_build.yml +++ b/.github/workflows/tests_build.yml @@ -5,11 +5,11 @@ on: inputs: type: type: string - description: 'Type of tests to build' + description: "Type of tests to build" required: true chip: type: string - description: 
'Chip to build tests for' + description: "Chip to build tests for" required: true jobs: diff --git a/.github/workflows/tests_hw.yml b/.github/workflows/tests_hw.yml index d7922500f10..76480ed7c0e 100644 --- a/.github/workflows/tests_hw.yml +++ b/.github/workflows/tests_hw.yml @@ -5,11 +5,11 @@ on: inputs: type: type: string - description: 'Type of tests to run' + description: "Type of tests to run" required: true chip: type: string - description: 'Chip to run tests for' + description: "Chip to run tests for" required: true env: diff --git a/.github/workflows/tests_qemu.yml b/.github/workflows/tests_qemu.yml index 0b4ec18e7ac..6675909c9df 100644 --- a/.github/workflows/tests_qemu.yml +++ b/.github/workflows/tests_qemu.yml @@ -64,8 +64,8 @@ jobs: if: ${{ steps.check-tests.outputs.enabled == 'true' }} with: cache-dependency-path: tests/requirements.txt - cache: 'pip' - python-version: '3.x' + cache: "pip" + python-version: "3.x" - name: Install Python dependencies if: ${{ steps.check-tests.outputs.enabled == 'true' }} diff --git a/.github/workflows/tests_results.yml b/.github/workflows/tests_results.yml index f9c572bf546..4ef338a9e16 100644 --- a/.github/workflows/tests_results.yml +++ b/.github/workflows/tests_results.yml @@ -18,11 +18,11 @@ jobs: github.event.workflow_run.conclusion == 'timed_out' runs-on: ubuntu-latest permissions: - actions: write - statuses: write - checks: write - pull-requests: write - contents: write + actions: write + statuses: write + checks: write + pull-requests: write + contents: write steps: - uses: actions/checkout@v4 with: @@ -139,13 +139,13 @@ jobs: core.info(`${name} is ${state}`); - name: Create output folder - if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} + if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} # codespell:ignore cancelled run: | rm -rf artifacts mkdir -p runtime-tests-results - name: Generate badge - if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} + if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} # codespell:ignore cancelled uses: jaywcjlove/generated-badges@v1.0.13 with: label: Runtime Tests @@ -154,7 +154,7 @@ jobs: color: ${{ job.status == 'success' && 'green' || 'red' }} - name: Push badge - if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} + if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} # codespell:ignore cancelled run: | git config user.name "github-actions[bot]" git config user.email "41898282+github-actions[bot]@users.noreply.github.com" diff --git a/.github/workflows/tests_wokwi.yml b/.github/workflows/tests_wokwi.yml index a891ca89dfd..f5eb2efcad2 100644 --- a/.github/workflows/tests_wokwi.yml +++ b/.github/workflows/tests_wokwi.yml @@ -247,8 +247,8 @@ jobs: if: ${{ steps.check-tests.outputs.enabled == 'true' }} with: cache-dependency-path: tests/requirements.txt - cache: 'pip' - python-version: '3.x' + cache: "pip" + python-version: "3.x" - name: Install dependencies if: ${{ steps.check-tests.outputs.enabled == 'true' }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6a949631bd9..f80261422b0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,8 +12,9 @@ default_language_version: repos: - repo: 
https://github.com/pre-commit/pre-commit-hooks - rev: "v4.5.0" + rev: "v5.0.0" hooks: + # Generic checks - id: check-case-conflict - id: check-symlinks - id: debug-statements @@ -25,6 +26,8 @@ repos: args: [--fix=lf] - id: trailing-whitespace args: [--markdown-linebreak-ext=md] + + # JSON formatting - id: pretty-format-json stages: [manual] args: [--autofix] @@ -35,40 +38,67 @@ repos: package\.json$| ^package\/.*$ ) + - repo: https://github.com/codespell-project/codespell rev: "v2.3.0" hooks: + # Spell checking - id: codespell exclude: ^.*\.(svd|SVD)$ + - repo: https://github.com/pre-commit/mirrors-clang-format rev: "v18.1.3" hooks: + # C/C++ formatting - id: clang-format types_or: [c, c++] exclude: ^.*\/build_opt\.h$ + - repo: https://github.com/psf/black-pre-commit-mirror - rev: "22.10.0" + rev: "24.10.0" hooks: + # Python formatting - id: black types_or: [python] args: [--line-length=120] #From the arduino code style. Add as argument rather than creating a new config file. + - repo: https://github.com/PyCQA/flake8 - rev: "7.0.0" + rev: "7.1.1" hooks: + # Python linting - id: flake8 types_or: [python] additional_dependencies: - flake8-bugbear - flake8-comprehensions - flake8-simplify + - repo: https://github.com/pre-commit/mirrors-prettier rev: "v3.1.0" hooks: + # YAML formatting - id: prettier types_or: [yaml] + + - repo: https://github.com/shellcheck-py/shellcheck-py + rev: "v0.10.0.1" + hooks: + # Bash linting + - id: shellcheck + types: [shell] + + - repo: https://github.com/openstack/bashate + rev: "2.1.1" + hooks: + # Bash formatting + - id: bashate + types: [shell] + args: ["-i", "E006"] # Ignore E006: Line too long + - repo: https://github.com/errata-ai/vale - rev: "v3.0.7" + rev: "v3.9.1" hooks: + # Sync vale styles and lint markdown and reStructuredText - id: vale name: vale-sync language_version: "1.21.6" diff --git a/.shellcheckrc b/.shellcheckrc new file mode 100644 index 00000000000..a7612e611a2 --- /dev/null +++ b/.shellcheckrc @@ -0,0 +1,11 @@ +# Shellcheck configuration file for ESP32 Arduino core + +# Optional checks. https://github.com/koalaman/shellcheck/wiki/optional +enable=add-default-case,deprecate-which,avoid-nullary-conditions + +# Enable search for external sources +external-sources=true + +# Search folder for sourced files. +# Set to the folder where the original script is located. 
+source-path=SCRIPTDIR diff --git a/boards.txt b/boards.txt index fa8c85b97cc..9a55e7b0ef9 100644 --- a/boards.txt +++ b/boards.txt @@ -47257,3 +47257,457 @@ waveshare_esp32_s3_touch_amoled_191.menu.EraseFlash.all=Enabled waveshare_esp32_s3_touch_amoled_191.menu.EraseFlash.all.upload.erase_cmd=-e ############################################################## + + +Pcbcupid_GLYPH_C3.name=Pcbcupid GLYPH C3 +Pcbcupid_GLYPH_C3.vid.0=0x2886 +Pcbcupid_GLYPH_C3.pid.0=0x0046 + +Pcbcupid_GLYPH_C3.bootloader.tool=esptool_py +Pcbcupid_GLYPH_C3.bootloader.tool.default=esptool_py + +Pcbcupid_GLYPH_C3.upload.tool=esptool_py +Pcbcupid_GLYPH_C3.upload.tool.default=esptool_py +Pcbcupid_GLYPH_C3.upload.tool.network=esp_ota + +Pcbcupid_GLYPH_C3.upload.maximum_size=1310720 +Pcbcupid_GLYPH_C3.upload.maximum_data_size=327680 +Pcbcupid_GLYPH_C3.upload.flags= +Pcbcupid_GLYPH_C3.upload.extra_flags= +Pcbcupid_GLYPH_C3.upload.use_1200bps_touch=false +Pcbcupid_GLYPH_C3.upload.wait_for_upload_port=false + +Pcbcupid_GLYPH_C3.serial.disableDTR=false +Pcbcupid_GLYPH_C3.serial.disableRTS=false + +Pcbcupid_GLYPH_C3.build.tarch=riscv32 +Pcbcupid_GLYPH_C3.build.target=esp +Pcbcupid_GLYPH_C3.build.mcu=esp32c3 +Pcbcupid_GLYPH_C3.build.core=esp32 +Pcbcupid_GLYPH_C3.build.variant=Pcbcupid_GLYPH_C3 +Pcbcupid_GLYPH_C3.build.board=PCBCUPID_GLYPHC3 +Pcbcupid_GLYPH_C3.build.bootloader_addr=0x0 + +Pcbcupid_GLYPH_C3.build.cdc_on_boot=1 +Pcbcupid_GLYPH_C3.build.f_cpu=160000000L +Pcbcupid_GLYPH_C3.build.flash_size=4MB +Pcbcupid_GLYPH_C3.build.flash_freq=80m +Pcbcupid_GLYPH_C3.build.flash_mode=qio +Pcbcupid_GLYPH_C3.build.boot=qio +Pcbcupid_GLYPH_C3.build.partitions=default +Pcbcupid_GLYPH_C3.build.defines= + +Pcbcupid_GLYPH_C3.menu.CDCOnBoot.default=Enabled +Pcbcupid_GLYPH_C3.menu.CDCOnBoot.default.build.cdc_on_boot=1 +Pcbcupid_GLYPH_C3.menu.CDCOnBoot.cdc=Disabled +Pcbcupid_GLYPH_C3.menu.CDCOnBoot.cdc.build.cdc_on_boot=0 + +Pcbcupid_GLYPH_C3.menu.PartitionScheme.default=Default 4MB with spiffs (1.2MB APP/1.5MB SPIFFS) +Pcbcupid_GLYPH_C3.menu.PartitionScheme.default.build.partitions=default +Pcbcupid_GLYPH_C3.menu.PartitionScheme.defaultffat=Default 4MB with ffat (1.2MB APP/1.5MB FATFS) +Pcbcupid_GLYPH_C3.menu.PartitionScheme.defaultffat.build.partitions=default_ffat +Pcbcupid_GLYPH_C3.menu.PartitionScheme.minimal=Minimal (1.3MB APP/700KB SPIFFS) +Pcbcupid_GLYPH_C3.menu.PartitionScheme.minimal.build.partitions=minimal +Pcbcupid_GLYPH_C3.menu.PartitionScheme.no_ota=No OTA (2MB APP/2MB SPIFFS) +Pcbcupid_GLYPH_C3.menu.PartitionScheme.no_ota.build.partitions=no_ota +Pcbcupid_GLYPH_C3.menu.PartitionScheme.no_ota.upload.maximum_size=2097152 +Pcbcupid_GLYPH_C3.menu.PartitionScheme.noota_3g=No OTA (1MB APP/3MB SPIFFS) +Pcbcupid_GLYPH_C3.menu.PartitionScheme.noota_3g.build.partitions=noota_3g +Pcbcupid_GLYPH_C3.menu.PartitionScheme.noota_3g.upload.maximum_size=1048576 +Pcbcupid_GLYPH_C3.menu.PartitionScheme.noota_ffat=No OTA (2MB APP/2MB FATFS) +Pcbcupid_GLYPH_C3.menu.PartitionScheme.noota_ffat.build.partitions=noota_ffat +Pcbcupid_GLYPH_C3.menu.PartitionScheme.noota_ffat.upload.maximum_size=2097152 +Pcbcupid_GLYPH_C3.menu.PartitionScheme.noota_3gffat=No OTA (1MB APP/3MB FATFS) +Pcbcupid_GLYPH_C3.menu.PartitionScheme.noota_3gffat.build.partitions=noota_3gffat +Pcbcupid_GLYPH_C3.menu.PartitionScheme.noota_3gffat.upload.maximum_size=1048576 +Pcbcupid_GLYPH_C3.menu.PartitionScheme.huge_app=Huge APP (3MB No OTA/1MB SPIFFS) +Pcbcupid_GLYPH_C3.menu.PartitionScheme.huge_app.build.partitions=huge_app 
+Pcbcupid_GLYPH_C3.menu.PartitionScheme.huge_app.upload.maximum_size=3145728 +Pcbcupid_GLYPH_C3.menu.PartitionScheme.min_spiffs=Minimal SPIFFS (1.9MB APP with OTA/190KB SPIFFS) +Pcbcupid_GLYPH_C3.menu.PartitionScheme.min_spiffs.build.partitions=min_spiffs +Pcbcupid_GLYPH_C3.menu.PartitionScheme.min_spiffs.upload.maximum_size=1966080 +Pcbcupid_GLYPH_C3.menu.PartitionScheme.rainmaker=RainMaker 4MB +Pcbcupid_GLYPH_C3.menu.PartitionScheme.rainmaker.build.partitions=rainmaker +Pcbcupid_GLYPH_C3.menu.PartitionScheme.rainmaker.upload.maximum_size=1966080 +Pcbcupid_GLYPH_C3.menu.PartitionScheme.rainmaker_4MB=RainMaker 4MB No OTA +Pcbcupid_GLYPH_C3.menu.PartitionScheme.rainmaker_4MB.build.partitions=rainmaker_4MB_no_ota +Pcbcupid_GLYPH_C3.menu.PartitionScheme.rainmaker_4MB.upload.maximum_size=4038656 + +Pcbcupid_GLYPH_C3.menu.CPUFreq.160=160MHz (WiFi) +Pcbcupid_GLYPH_C3.menu.CPUFreq.160.build.f_cpu=160000000L +Pcbcupid_GLYPH_C3.menu.CPUFreq.80=80MHz (WiFi) +Pcbcupid_GLYPH_C3.menu.CPUFreq.80.build.f_cpu=80000000L +Pcbcupid_GLYPH_C3.menu.CPUFreq.40=40MHz +Pcbcupid_GLYPH_C3.menu.CPUFreq.40.build.f_cpu=40000000L +Pcbcupid_GLYPH_C3.menu.CPUFreq.20=20MHz +Pcbcupid_GLYPH_C3.menu.CPUFreq.20.build.f_cpu=20000000L +Pcbcupid_GLYPH_C3.menu.CPUFreq.10=10MHz +Pcbcupid_GLYPH_C3.menu.CPUFreq.10.build.f_cpu=10000000L + +Pcbcupid_GLYPH_C3.menu.FlashMode.qio=QIO +Pcbcupid_GLYPH_C3.menu.FlashMode.qio.build.flash_mode=dio +Pcbcupid_GLYPH_C3.menu.FlashMode.qio.build.boot=qio +Pcbcupid_GLYPH_C3.menu.FlashMode.dio=DIO +Pcbcupid_GLYPH_C3.menu.FlashMode.dio.build.flash_mode=dio +Pcbcupid_GLYPH_C3.menu.FlashMode.dio.build.boot=dio + +Pcbcupid_GLYPH_C3.menu.FlashFreq.80=80MHz +Pcbcupid_GLYPH_C3.menu.FlashFreq.80.build.flash_freq=80m +Pcbcupid_GLYPH_C3.menu.FlashFreq.40=40MHz +Pcbcupid_GLYPH_C3.menu.FlashFreq.40.build.flash_freq=40m + +Pcbcupid_GLYPH_C3.menu.FlashSize.4M=4MB (32Mb) +Pcbcupid_GLYPH_C3.menu.FlashSize.4M.build.flash_size=4MB + +Pcbcupid_GLYPH_C3.menu.UploadSpeed.921600=921600 +Pcbcupid_GLYPH_C3.menu.UploadSpeed.921600.upload.speed=921600 +Pcbcupid_GLYPH_C3.menu.UploadSpeed.115200=115200 +Pcbcupid_GLYPH_C3.menu.UploadSpeed.115200.upload.speed=115200 +Pcbcupid_GLYPH_C3.menu.UploadSpeed.256000.windows=256000 +Pcbcupid_GLYPH_C3.menu.UploadSpeed.256000.upload.speed=256000 +Pcbcupid_GLYPH_C3.menu.UploadSpeed.230400.windows.upload.speed=256000 +Pcbcupid_GLYPH_C3.menu.UploadSpeed.230400=230400 +Pcbcupid_GLYPH_C3.menu.UploadSpeed.230400.upload.speed=230400 +Pcbcupid_GLYPH_C3.menu.UploadSpeed.460800.linux=460800 +Pcbcupid_GLYPH_C3.menu.UploadSpeed.460800.macosx=460800 +Pcbcupid_GLYPH_C3.menu.UploadSpeed.460800.upload.speed=460800 +Pcbcupid_GLYPH_C3.menu.UploadSpeed.512000.windows=512000 +Pcbcupid_GLYPH_C3.menu.UploadSpeed.512000.upload.speed=512000 + +Pcbcupid_GLYPH_C3.menu.DebugLevel.none=None +Pcbcupid_GLYPH_C3.menu.DebugLevel.none.build.code_debug=0 +Pcbcupid_GLYPH_C3.menu.DebugLevel.error=Error +Pcbcupid_GLYPH_C3.menu.DebugLevel.error.build.code_debug=1 +Pcbcupid_GLYPH_C3.menu.DebugLevel.warn=Warn +Pcbcupid_GLYPH_C3.menu.DebugLevel.warn.build.code_debug=2 +Pcbcupid_GLYPH_C3.menu.DebugLevel.info=Info +Pcbcupid_GLYPH_C3.menu.DebugLevel.info.build.code_debug=3 +Pcbcupid_GLYPH_C3.menu.DebugLevel.debug=Debug +Pcbcupid_GLYPH_C3.menu.DebugLevel.debug.build.code_debug=4 +Pcbcupid_GLYPH_C3.menu.DebugLevel.verbose=Verbose +Pcbcupid_GLYPH_C3.menu.DebugLevel.verbose.build.code_debug=5 + +Pcbcupid_GLYPH_C3.menu.EraseFlash.none=Disabled +Pcbcupid_GLYPH_C3.menu.EraseFlash.none.upload.erase_cmd= 
+Pcbcupid_GLYPH_C3.menu.EraseFlash.all=Enabled +Pcbcupid_GLYPH_C3.menu.EraseFlash.all.upload.erase_cmd=-e + +############################################################## + + +Pcbcupid_GLYPH_H2.name=Pcbcupid GLYPH H2 + +Pcbcupid_GLYPH_H2.bootloader.tool=esptool_py +Pcbcupid_GLYPH_H2.bootloader.tool.default=esptool_py + +Pcbcupid_GLYPH_H2.upload.tool=esptool_py +Pcbcupid_GLYPH_H2.upload.tool.default=esptool_py +Pcbcupid_GLYPH_H2.upload.tool.network=esp_ota + +Pcbcupid_GLYPH_H2.upload.maximum_size=1310720 +Pcbcupid_GLYPH_H2.upload.maximum_data_size=327680 +Pcbcupid_GLYPH_H2.upload.flags= +Pcbcupid_GLYPH_H2.upload.extra_flags= +Pcbcupid_GLYPH_H2.upload.use_1200bps_touch=false +Pcbcupid_GLYPH_H2.upload.wait_for_upload_port=false + +Pcbcupid_GLYPH_H2.serial.disableDTR=false +Pcbcupid_GLYPH_H2.serial.disableRTS=false + +Pcbcupid_GLYPH_H2.build.tarch=riscv32 +Pcbcupid_GLYPH_H2.build.target=esp +Pcbcupid_GLYPH_H2.build.mcu=esp32h2 +Pcbcupid_GLYPH_H2.build.core=esp32 +Pcbcupid_GLYPH_H2.build.variant=Pcbcupid_GLYPH_H2 +Pcbcupid_GLYPH_H2.build.board=PCBCUPID_GLYPHH2 +Pcbcupid_GLYPH_H2.build.bootloader_addr=0x0 + +Pcbcupid_GLYPH_H2.build.cdc_on_boot=1 +Pcbcupid_GLYPH_H2.build.f_cpu=96000000L +Pcbcupid_GLYPH_H2.build.flash_size=4MB +Pcbcupid_GLYPH_H2.build.flash_freq=64m +Pcbcupid_GLYPH_H2.build.img_freq=48m +Pcbcupid_GLYPH_H2.build.flash_mode=qio +Pcbcupid_GLYPH_H2.build.boot=qio +Pcbcupid_GLYPH_H2.build.partitions=default +Pcbcupid_GLYPH_H2.build.defines= + +## IDE 2.0 Seems to not update the value +Pcbcupid_GLYPH_H2.menu.JTAGAdapter.default=Disabled +Pcbcupid_GLYPH_H2.menu.JTAGAdapter.default.build.copy_jtag_files=0 +Pcbcupid_GLYPH_H2.menu.JTAGAdapter.builtin=Integrated USB JTAG +Pcbcupid_GLYPH_H2.menu.JTAGAdapter.builtin.build.openocdscript=esp32h2-builtin.cfg +Pcbcupid_GLYPH_H2.menu.JTAGAdapter.builtin.build.copy_jtag_files=1 +Pcbcupid_GLYPH_H2.menu.JTAGAdapter.external=FTDI Adapter +Pcbcupid_GLYPH_H2.menu.JTAGAdapter.external.build.openocdscript=esp32h2-ftdi.cfg +Pcbcupid_GLYPH_H2.menu.JTAGAdapter.external.build.copy_jtag_files=1 +Pcbcupid_GLYPH_H2.menu.JTAGAdapter.bridge=ESP USB Bridge +Pcbcupid_GLYPH_H2.menu.JTAGAdapter.bridge.build.openocdscript=esp32h2-bridge.cfg +Pcbcupid_GLYPH_H2.menu.JTAGAdapter.bridge.build.copy_jtag_files=1 + +Pcbcupid_GLYPH_H2.menu.CDCOnBoot.default=Enabled +Pcbcupid_GLYPH_H2.menu.CDCOnBoot.default.build.cdc_on_boot=1 +Pcbcupid_GLYPH_H2.menu.CDCOnBoot.cdc=Disabled +Pcbcupid_GLYPH_H2.menu.CDCOnBoot.cdc.build.cdc_on_boot=0 + +Pcbcupid_GLYPH_H2.menu.PartitionScheme.default=Default 4MB with spiffs (1.2MB APP/1.5MB SPIFFS) +Pcbcupid_GLYPH_H2.menu.PartitionScheme.default.build.partitions=default +Pcbcupid_GLYPH_H2.menu.PartitionScheme.defaultffat=Default 4MB with ffat (1.2MB APP/1.5MB FATFS) +Pcbcupid_GLYPH_H2.menu.PartitionScheme.defaultffat.build.partitions=default_ffat +Pcbcupid_GLYPH_H2.menu.PartitionScheme.minimal=Minimal (1.3MB APP/700KB SPIFFS) +Pcbcupid_GLYPH_H2.menu.PartitionScheme.minimal.build.partitions=minimal +Pcbcupid_GLYPH_H2.menu.PartitionScheme.no_fs=No FS 4MB (2MB APP x2) +Pcbcupid_GLYPH_H2.menu.PartitionScheme.no_fs.build.partitions=no_fs +Pcbcupid_GLYPH_H2.menu.PartitionScheme.no_fs.upload.maximum_size=2031616 +Pcbcupid_GLYPH_H2.menu.PartitionScheme.no_ota=No OTA (2MB APP/2MB SPIFFS) +Pcbcupid_GLYPH_H2.menu.PartitionScheme.no_ota.build.partitions=no_ota +Pcbcupid_GLYPH_H2.menu.PartitionScheme.no_ota.upload.maximum_size=2097152 +Pcbcupid_GLYPH_H2.menu.PartitionScheme.noota_3g=No OTA (1MB APP/3MB SPIFFS) 
+Pcbcupid_GLYPH_H2.menu.PartitionScheme.noota_3g.build.partitions=noota_3g +Pcbcupid_GLYPH_H2.menu.PartitionScheme.noota_3g.upload.maximum_size=1048576 +Pcbcupid_GLYPH_H2.menu.PartitionScheme.noota_ffat=No OTA (2MB APP/2MB FATFS) +Pcbcupid_GLYPH_H2.menu.PartitionScheme.noota_ffat.build.partitions=noota_ffat +Pcbcupid_GLYPH_H2.menu.PartitionScheme.noota_ffat.upload.maximum_size=2097152 +Pcbcupid_GLYPH_H2.menu.PartitionScheme.noota_3gffat=No OTA (1MB APP/3MB FATFS) +Pcbcupid_GLYPH_H2.menu.PartitionScheme.noota_3gffat.build.partitions=noota_3gffat +Pcbcupid_GLYPH_H2.menu.PartitionScheme.noota_3gffat.upload.maximum_size=1048576 +Pcbcupid_GLYPH_H2.menu.PartitionScheme.huge_app=Huge APP (3MB No OTA/1MB SPIFFS) +Pcbcupid_GLYPH_H2.menu.PartitionScheme.huge_app.build.partitions=huge_app +Pcbcupid_GLYPH_H2.menu.PartitionScheme.huge_app.upload.maximum_size=3145728 +Pcbcupid_GLYPH_H2.menu.PartitionScheme.min_spiffs=Minimal SPIFFS (1.9MB APP with OTA/190KB SPIFFS) +Pcbcupid_GLYPH_H2.menu.PartitionScheme.min_spiffs.build.partitions=min_spiffs +Pcbcupid_GLYPH_H2.menu.PartitionScheme.min_spiffs.upload.maximum_size=1966080 +Pcbcupid_GLYPH_H2.menu.PartitionScheme.zigbee=Zigbee 4MB with spiffs +Pcbcupid_GLYPH_H2.menu.PartitionScheme.zigbee.build.partitions=zigbee +Pcbcupid_GLYPH_H2.menu.PartitionScheme.zigbee.upload.maximum_size=1310720 +Pcbcupid_GLYPH_H2.menu.PartitionScheme.zigbee_zczr=Zigbee ZCZR 4MB with spiffs +Pcbcupid_GLYPH_H2.menu.PartitionScheme.zigbee_zczr.build.partitions=zigbee_zczr +Pcbcupid_GLYPH_H2.menu.PartitionScheme.zigbee_zczr.upload.maximum_size=1310720 +Pcbcupid_GLYPH_H2.menu.PartitionScheme.custom=Custom +Pcbcupid_GLYPH_H2.menu.PartitionScheme.custom.build.partitions= +Pcbcupid_GLYPH_H2.menu.PartitionScheme.custom.upload.maximum_size=16777216 + +Pcbcupid_GLYPH_H2.menu.FlashMode.qio=QIO +Pcbcupid_GLYPH_H2.menu.FlashMode.qio.build.flash_mode=dio +Pcbcupid_GLYPH_H2.menu.FlashMode.qio.build.boot=qio +Pcbcupid_GLYPH_H2.menu.FlashMode.dio=DIO +Pcbcupid_GLYPH_H2.menu.FlashMode.dio.build.flash_mode=dio +Pcbcupid_GLYPH_H2.menu.FlashMode.dio.build.boot=dio + +Pcbcupid_GLYPH_H2.menu.FlashFreq.64=64MHz +Pcbcupid_GLYPH_H2.menu.FlashFreq.64.build.flash_freq=64m +Pcbcupid_GLYPH_H2.menu.FlashFreq.64.build.img_freq=48m +#Pcbcupid_GLYPH_H2.menu.FlashFreq.32=32MHz +#Pcbcupid_GLYPH_H2.menu.FlashFreq.32.build.flash_freq=32m +#Pcbcupid_GLYPH_H2.menu.FlashFreq.32.build.img_freq=24m +Pcbcupid_GLYPH_H2.menu.FlashFreq.16=16MHz +Pcbcupid_GLYPH_H2.menu.FlashFreq.16.build.flash_freq=16m +Pcbcupid_GLYPH_H2.menu.FlashFreq.16.build.img_freq=12m + +Pcbcupid_GLYPH_H2.menu.FlashSize.2M=2MB (16Mb) +Pcbcupid_GLYPH_H2.menu.FlashSize.2M.build.flash_size=2MB +Pcbcupid_GLYPH_H2.menu.FlashSize.4M=4MB (32Mb) +Pcbcupid_GLYPH_H2.menu.FlashSize.4M.build.flash_size=4MB + +Pcbcupid_GLYPH_H2.menu.UploadSpeed.921600=921600 +Pcbcupid_GLYPH_H2.menu.UploadSpeed.921600.upload.speed=921600 +Pcbcupid_GLYPH_H2.menu.UploadSpeed.115200=115200 +Pcbcupid_GLYPH_H2.menu.UploadSpeed.115200.upload.speed=115200 +Pcbcupid_GLYPH_H2.menu.UploadSpeed.256000.windows=256000 +Pcbcupid_GLYPH_H2.menu.UploadSpeed.256000.upload.speed=256000 +Pcbcupid_GLYPH_H2.menu.UploadSpeed.230400.windows.upload.speed=256000 +Pcbcupid_GLYPH_H2.menu.UploadSpeed.230400=230400 +Pcbcupid_GLYPH_H2.menu.UploadSpeed.230400.upload.speed=230400 +Pcbcupid_GLYPH_H2.menu.UploadSpeed.460800.linux=460800 +Pcbcupid_GLYPH_H2.menu.UploadSpeed.460800.macosx=460800 +Pcbcupid_GLYPH_H2.menu.UploadSpeed.460800.upload.speed=460800 +Pcbcupid_GLYPH_H2.menu.UploadSpeed.512000.windows=512000 
+Pcbcupid_GLYPH_H2.menu.UploadSpeed.512000.upload.speed=512000 + +Pcbcupid_GLYPH_H2.menu.DebugLevel.none=None +Pcbcupid_GLYPH_H2.menu.DebugLevel.none.build.code_debug=0 +Pcbcupid_GLYPH_H2.menu.DebugLevel.error=Error +Pcbcupid_GLYPH_H2.menu.DebugLevel.error.build.code_debug=1 +Pcbcupid_GLYPH_H2.menu.DebugLevel.warn=Warn +Pcbcupid_GLYPH_H2.menu.DebugLevel.warn.build.code_debug=2 +Pcbcupid_GLYPH_H2.menu.DebugLevel.info=Info +Pcbcupid_GLYPH_H2.menu.DebugLevel.info.build.code_debug=3 +Pcbcupid_GLYPH_H2.menu.DebugLevel.debug=Debug +Pcbcupid_GLYPH_H2.menu.DebugLevel.debug.build.code_debug=4 +Pcbcupid_GLYPH_H2.menu.DebugLevel.verbose=Verbose +Pcbcupid_GLYPH_H2.menu.DebugLevel.verbose.build.code_debug=5 + +Pcbcupid_GLYPH_H2.menu.EraseFlash.none=Disabled +Pcbcupid_GLYPH_H2.menu.EraseFlash.none.upload.erase_cmd= +Pcbcupid_GLYPH_H2.menu.EraseFlash.all=Enabled +Pcbcupid_GLYPH_H2.menu.EraseFlash.all.upload.erase_cmd=-e + +Pcbcupid_GLYPH_H2.menu.ZigbeeMode.default=Disabled +Pcbcupid_GLYPH_H2.menu.ZigbeeMode.default.build.zigbee_mode= +Pcbcupid_GLYPH_H2.menu.ZigbeeMode.default.build.zigbee_libs= +Pcbcupid_GLYPH_H2.menu.ZigbeeMode.ed=Zigbee ED (end device) +Pcbcupid_GLYPH_H2.menu.ZigbeeMode.ed.build.zigbee_mode=-DZIGBEE_MODE_ED +Pcbcupid_GLYPH_H2.menu.ZigbeeMode.ed.build.zigbee_libs=-lesp_zb_api_ed -lesp_zb_cli_command -lzboss_stack.ed -lzboss_port +Pcbcupid_GLYPH_H2.menu.ZigbeeMode.zczr=Zigbee ZCZR (coordinator/router) +Pcbcupid_GLYPH_H2.menu.ZigbeeMode.zczr.build.zigbee_mode=-DZIGBEE_MODE_ZCZR +Pcbcupid_GLYPH_H2.menu.ZigbeeMode.zczr.build.zigbee_libs=-lesp_zb_api_zczr -lesp_zb_cli_command -lzboss_stack.zczr -lzboss_port +Pcbcupid_GLYPH_H2.menu.ZigbeeMode.rcp=Zigbee RCP (radio co-processor) +Pcbcupid_GLYPH_H2.menu.ZigbeeMode.rcp.build.zigbee_mode=-DZIGBEE_MODE_RCP +Pcbcupid_GLYPH_H2.menu.ZigbeeMode.rcp.build.zigbee_libs=-lesp_zb_api_rcp -lesp_zb_cli_command -lzboss_stack.rcp -lzboss_port + +############################################################## + + +Pcbcupid_GLYPH_C6.name=Pcbcupid GLYPH C6 + +Pcbcupid_GLYPH_C6.bootloader.tool=esptool_py +Pcbcupid_GLYPH_C6.bootloader.tool.default=esptool_py + +Pcbcupid_GLYPH_C6.upload.tool=esptool_py +Pcbcupid_GLYPH_C6.upload.tool.default=esptool_py +Pcbcupid_GLYPH_C6.upload.tool.network=esp_ota + +Pcbcupid_GLYPH_C6.upload.maximum_size=1310720 +Pcbcupid_GLYPH_C6.upload.maximum_data_size=327680 +Pcbcupid_GLYPH_C6.upload.flags= +Pcbcupid_GLYPH_C6.upload.extra_flags= +Pcbcupid_GLYPH_C6.upload.use_1200bps_touch=false +Pcbcupid_GLYPH_C6.upload.wait_for_upload_port=false + +Pcbcupid_GLYPH_C6.serial.disableDTR=false +Pcbcupid_GLYPH_C6.serial.disableRTS=false + +Pcbcupid_GLYPH_C6.build.tarch=riscv32 +Pcbcupid_GLYPH_C6.build.target=esp +Pcbcupid_GLYPH_C6.build.mcu=esp32c6 +Pcbcupid_GLYPH_C6.build.core=esp32 +Pcbcupid_GLYPH_C6.build.variant=Pcbcupid_GLYPH_C6 +Pcbcupid_GLYPH_C6.build.board=PCBCUPID_GLYPHC6 +Pcbcupid_GLYPH_C6.build.bootloader_addr=0x0 + +Pcbcupid_GLYPH_C6.build.cdc_on_boot=1 +Pcbcupid_GLYPH_C6.build.f_cpu=160000000L +Pcbcupid_GLYPH_C6.build.flash_size=4MB +Pcbcupid_GLYPH_C6.build.flash_freq=80m +Pcbcupid_GLYPH_C6.build.flash_mode=qio +Pcbcupid_GLYPH_C6.build.boot=qio +Pcbcupid_GLYPH_C6.build.partitions=default +Pcbcupid_GLYPH_C6.build.defines= + +## IDE 2.0 Seems to not update the value +Pcbcupid_GLYPH_C6.menu.JTAGAdapter.default=Disabled +Pcbcupid_GLYPH_C6.menu.JTAGAdapter.default.build.copy_jtag_files=0 +Pcbcupid_GLYPH_C6.menu.JTAGAdapter.builtin=Integrated USB JTAG +Pcbcupid_GLYPH_C6.menu.JTAGAdapter.builtin.build.openocdscript=esp32c6-builtin.cfg 
+Pcbcupid_GLYPH_C6.menu.JTAGAdapter.builtin.build.copy_jtag_files=1 +Pcbcupid_GLYPH_C6.menu.JTAGAdapter.external=FTDI Adapter +Pcbcupid_GLYPH_C6.menu.JTAGAdapter.external.build.openocdscript=esp32c6-ftdi.cfg +Pcbcupid_GLYPH_C6.menu.JTAGAdapter.external.build.copy_jtag_files=1 +Pcbcupid_GLYPH_C6.menu.JTAGAdapter.bridge=ESP USB Bridge +Pcbcupid_GLYPH_C6.menu.JTAGAdapter.bridge.build.openocdscript=esp32c6-bridge.cfg +Pcbcupid_GLYPH_C6.menu.JTAGAdapter.bridge.build.copy_jtag_files=1 + +Pcbcupid_GLYPH_C6.menu.CDCOnBoot.cdc=Enabled +Pcbcupid_GLYPH_C6.menu.CDCOnBoot.cdc.build.cdc_on_boot=1 +Pcbcupid_GLYPH_C6.menu.CDCOnBoot.default=Disabled +Pcbcupid_GLYPH_C6.menu.CDCOnBoot.default.build.cdc_on_boot=0 + +Pcbcupid_GLYPH_C6.menu.PartitionScheme.default=Default 4MB with spiffs (1.2MB APP/1.5MB SPIFFS) +Pcbcupid_GLYPH_C6.menu.PartitionScheme.default.build.partitions=default +Pcbcupid_GLYPH_C6.menu.PartitionScheme.defaultffat=Default 4MB with ffat (1.2MB APP/1.5MB FATFS) +Pcbcupid_GLYPH_C6.menu.PartitionScheme.defaultffat.build.partitions=default_ffat +Pcbcupid_GLYPH_C6.menu.PartitionScheme.no_ota=No OTA (2MB APP/2MB SPIFFS) +Pcbcupid_GLYPH_C6.menu.PartitionScheme.no_ota.build.partitions=no_ota +Pcbcupid_GLYPH_C6.menu.PartitionScheme.no_ota.upload.maximum_size=2097152 +Pcbcupid_GLYPH_C6.menu.PartitionScheme.noota_3g=No OTA (1MB APP/3MB SPIFFS) +Pcbcupid_GLYPH_C6.menu.PartitionScheme.noota_3g.build.partitions=noota_3g +Pcbcupid_GLYPH_C6.menu.PartitionScheme.noota_3g.upload.maximum_size=1048576 +Pcbcupid_GLYPH_C6.menu.PartitionScheme.noota_ffat=No OTA (2MB APP/2MB FATFS) +Pcbcupid_GLYPH_C6.menu.PartitionScheme.noota_ffat.build.partitions=noota_ffat +Pcbcupid_GLYPH_C6.menu.PartitionScheme.noota_ffat.upload.maximum_size=2097152 +Pcbcupid_GLYPH_C6.menu.PartitionScheme.noota_3gffat=No OTA (1MB APP/3MB FATFS) +Pcbcupid_GLYPH_C6.menu.PartitionScheme.noota_3gffat.build.partitions=noota_3gffat +Pcbcupid_GLYPH_C6.menu.PartitionScheme.noota_3gffat.upload.maximum_size=1048576 +Pcbcupid_GLYPH_C6.menu.PartitionScheme.huge_app=Huge APP (3MB No OTA/1MB SPIFFS) +Pcbcupid_GLYPH_C6.menu.PartitionScheme.huge_app.build.partitions=huge_app +Pcbcupid_GLYPH_C6.menu.PartitionScheme.huge_app.upload.maximum_size=3145728 +Pcbcupid_GLYPH_C6.menu.PartitionScheme.zigbee=Zigbee 4MB with spiffs +Pcbcupid_GLYPH_C6.menu.PartitionScheme.zigbee.build.partitions=zigbee +Pcbcupid_GLYPH_C6.menu.PartitionScheme.zigbee.upload.maximum_size=1310720 +Pcbcupid_GLYPH_C6.menu.PartitionScheme.zigbee_zczr=Zigbee ZCZR 4MB with spiffs +Pcbcupid_GLYPH_C6.menu.PartitionScheme.zigbee_zczr.build.partitions=zigbee_zczr +Pcbcupid_GLYPH_C6.menu.PartitionScheme.zigbee_zczr.upload.maximum_size=1310720 + +Pcbcupid_GLYPH_C6.menu.CPUFreq.160=160MHz (WiFi) +Pcbcupid_GLYPH_C6.menu.CPUFreq.160.build.f_cpu=160000000L +Pcbcupid_GLYPH_C6.menu.CPUFreq.80=80MHz (WiFi) +Pcbcupid_GLYPH_C6.menu.CPUFreq.80.build.f_cpu=80000000L +Pcbcupid_GLYPH_C6.menu.CPUFreq.40=40MHz +Pcbcupid_GLYPH_C6.menu.CPUFreq.40.build.f_cpu=40000000L +Pcbcupid_GLYPH_C6.menu.CPUFreq.20=20MHz +Pcbcupid_GLYPH_C6.menu.CPUFreq.20.build.f_cpu=20000000L +Pcbcupid_GLYPH_C6.menu.CPUFreq.10=10MHz +Pcbcupid_GLYPH_C6.menu.CPUFreq.10.build.f_cpu=10000000L + +Pcbcupid_GLYPH_C6.menu.FlashMode.qio=QIO +Pcbcupid_GLYPH_C6.menu.FlashMode.qio.build.flash_mode=dio +Pcbcupid_GLYPH_C6.menu.FlashMode.qio.build.boot=qio +Pcbcupid_GLYPH_C6.menu.FlashMode.dio=DIO +Pcbcupid_GLYPH_C6.menu.FlashMode.dio.build.flash_mode=dio +Pcbcupid_GLYPH_C6.menu.FlashMode.dio.build.boot=dio + +Pcbcupid_GLYPH_C6.menu.FlashFreq.80=80MHz 
+Pcbcupid_GLYPH_C6.menu.FlashFreq.80.build.flash_freq=80m +Pcbcupid_GLYPH_C6.menu.FlashFreq.40=40MHz +Pcbcupid_GLYPH_C6.menu.FlashFreq.40.build.flash_freq=40m + +Pcbcupid_GLYPH_C6.menu.FlashSize.4M=4MB (32Mb) +Pcbcupid_GLYPH_C6.menu.FlashSize.4M.build.flash_size=4MB + +Pcbcupid_GLYPH_C6.menu.UploadSpeed.921600=921600 +Pcbcupid_GLYPH_C6.menu.UploadSpeed.921600.upload.speed=921600 +Pcbcupid_GLYPH_C6.menu.UploadSpeed.115200=115200 +Pcbcupid_GLYPH_C6.menu.UploadSpeed.115200.upload.speed=115200 +Pcbcupid_GLYPH_C6.menu.UploadSpeed.256000.windows=256000 +Pcbcupid_GLYPH_C6.menu.UploadSpeed.256000.upload.speed=256000 +Pcbcupid_GLYPH_C6.menu.UploadSpeed.230400.windows.upload.speed=256000 +Pcbcupid_GLYPH_C6.menu.UploadSpeed.230400=230400 +Pcbcupid_GLYPH_C6.menu.UploadSpeed.230400.upload.speed=230400 +Pcbcupid_GLYPH_C6.menu.UploadSpeed.460800.linux=460800 +Pcbcupid_GLYPH_C6.menu.UploadSpeed.460800.macosx=460800 +Pcbcupid_GLYPH_C6.menu.UploadSpeed.460800.upload.speed=460800 +Pcbcupid_GLYPH_C6.menu.UploadSpeed.512000.windows=512000 +Pcbcupid_GLYPH_C6.menu.UploadSpeed.512000.upload.speed=512000 + +Pcbcupid_GLYPH_C6.menu.DebugLevel.none=None +Pcbcupid_GLYPH_C6.menu.DebugLevel.none.build.code_debug=0 +Pcbcupid_GLYPH_C6.menu.DebugLevel.error=Error +Pcbcupid_GLYPH_C6.menu.DebugLevel.error.build.code_debug=1 +Pcbcupid_GLYPH_C6.menu.DebugLevel.warn=Warn +Pcbcupid_GLYPH_C6.menu.DebugLevel.warn.build.code_debug=2 +Pcbcupid_GLYPH_C6.menu.DebugLevel.info=Info +Pcbcupid_GLYPH_C6.menu.DebugLevel.info.build.code_debug=3 +Pcbcupid_GLYPH_C6.menu.DebugLevel.debug=Debug +Pcbcupid_GLYPH_C6.menu.DebugLevel.debug.build.code_debug=4 +Pcbcupid_GLYPH_C6.menu.DebugLevel.verbose=Verbose +Pcbcupid_GLYPH_C6.menu.DebugLevel.verbose.build.code_debug=5 + +Pcbcupid_GLYPH_C6.menu.EraseFlash.none=Disabled +Pcbcupid_GLYPH_C6.menu.EraseFlash.none.upload.erase_cmd= +Pcbcupid_GLYPH_C6.menu.EraseFlash.all=Enabled +Pcbcupid_GLYPH_C6.menu.EraseFlash.all.upload.erase_cmd=-e + +Pcbcupid_GLYPH_C6.menu.ZigbeeMode.default=Disabled +Pcbcupid_GLYPH_C6.menu.ZigbeeMode.default.build.zigbee_mode= +Pcbcupid_GLYPH_C6.menu.ZigbeeMode.default.build.zigbee_libs= +Pcbcupid_GLYPH_C6.menu.ZigbeeMode.ed=Zigbee ED (end device) +Pcbcupid_GLYPH_C6.menu.ZigbeeMode.ed.build.zigbee_mode=-DZIGBEE_MODE_ED +Pcbcupid_GLYPH_C6.menu.ZigbeeMode.ed.build.zigbee_libs=-lesp_zb_api_ed -lesp_zb_cli_command -lzboss_stack.ed -lzboss_port +Pcbcupid_GLYPH_C6.menu.ZigbeeMode.zczr=Zigbee ZCZR (coordinator/router) +Pcbcupid_GLYPH_C6.menu.ZigbeeMode.zczr.build.zigbee_mode=-DZIGBEE_MODE_ZCZR +Pcbcupid_GLYPH_C6.menu.ZigbeeMode.zczr.build.zigbee_libs=-lesp_zb_api_zczr -lesp_zb_cli_command -lzboss_stack.zczr -lzboss_port +Pcbcupid_GLYPH_C6.menu.ZigbeeMode.rcp=Zigbee RCP (radio co-processor) +Pcbcupid_GLYPH_C6.menu.ZigbeeMode.rcp.build.zigbee_mode=-DZIGBEE_MODE_RCP +Pcbcupid_GLYPH_C6.menu.ZigbeeMode.rcp.build.zigbee_libs=-lesp_zb_api_rcp -lesp_zb_cli_command -lzboss_stack.rcp -lzboss_port + +############################################################## diff --git a/cores/esp32/Esp.cpp b/cores/esp32/Esp.cpp index aa189516469..9f90a828b25 100644 --- a/cores/esp32/Esp.cpp +++ b/cores/esp32/Esp.cpp @@ -277,7 +277,7 @@ const char *EspClass::getChipModel(void) { return "ESP32-D0WD"; } case EFUSE_RD_CHIP_VER_PKG_ESP32D2WDQ5: return "ESP32-D2WD"; - case EFUSE_RD_CHIP_VER_PKG_ESP32PICOD2: return "ESP32-PICO-D2"; + case EFUSE_RD_CHIP_VER_PKG_ESP32U4WDH: return "ESP32-U4WDH"; case EFUSE_RD_CHIP_VER_PKG_ESP32PICOD4: return "ESP32-PICO-D4"; case EFUSE_RD_CHIP_VER_PKG_ESP32PICOV302: return 
"ESP32-PICO-V3-02"; case EFUSE_RD_CHIP_VER_PKG_ESP32D0WDR2V3: return "ESP32-D0WDR2-V3"; diff --git a/cores/esp32/esp32-hal.h b/cores/esp32/esp32-hal.h index d80bf2f15de..d0bd4b8bc93 100644 --- a/cores/esp32/esp32-hal.h +++ b/cores/esp32/esp32-hal.h @@ -61,6 +61,19 @@ extern "C" { #define ARDUINO_EVENT_RUNNING_CORE CONFIG_ARDUINO_EVENT_RUNNING_CORE #endif +#if CONFIG_IDF_TARGET_ESP32 || CONFIG_IDF_TARGET_ESP32S2 || CONFIG_IDF_TARGET_ESP32S3 +static const uint8_t BOOT_PIN = 0; +#elif CONFIG_IDF_TARGET_ESP32C2 || CONFIG_IDF_TARGET_ESP32C3 || CONFIG_IDF_TARGET_ESP32C6 || CONFIG_IDF_TARGET_ESP32H2 || CONFIG_IDF_TARGET_ESP32C61 +static const uint8_t BOOT_PIN = 9; +#elif CONFIG_IDF_TARGET_ESP32P4 +static const uint8_t BOOT_PIN = 35; +#elif CONFIG_IDF_TARGET_ESP32C5 +static const uint8_t BOOT_PIN = 28; +#else +#error BOOT_PIN not defined for this chip! +#endif +#define BOOT_PIN BOOT_PIN + //forward declaration from freertos/portmacro.h void vPortYield(void); void yield(void); diff --git a/docs/_static/logo_pio.png b/docs/_static/logo_pio.png deleted file mode 100644 index a64c1563964..00000000000 Binary files a/docs/_static/logo_pio.png and /dev/null differ diff --git a/docs/en/contributing.rst b/docs/en/contributing.rst index 1e1529fb557..fb7843f1fb6 100644 --- a/docs/en/contributing.rst +++ b/docs/en/contributing.rst @@ -441,6 +441,16 @@ For checking the code style and other code quality checks, we use pre-commit hoo These hooks will be automatically run by the CI when a Pull Request is marked as ``Status: Pending Merge``. You can check which hooks are being run in the ``.pre-commit-config.yaml`` file. +Currently, we have hooks for the following tasks: + +* Formatters for C, C++, Python, Bash, JSON, Markdown and ReStructuredText files; +* Linters for Python, Bash and prose (spoken language); +* Checking for spelling errors in the code and documentation; +* Removing trailing whitespaces and tabs in the code; +* Checking for the presence of private keys and other sensitive information in the code; +* Fixing the line endings and end of files (EOF) in the code; +* And more. + You can read more about the pre-commit hooks in the `pre-commit documentation `_. If you want to run the pre-commit hooks locally, you first need to install the required dependencies by running: diff --git a/docs/en/getting_started.rst b/docs/en/getting_started.rst index c4bd54b246c..8d312317bdf 100644 --- a/docs/en/getting_started.rst +++ b/docs/en/getting_started.rst @@ -78,14 +78,13 @@ Supported IDEs Here is the list of supported IDE for Arduino ESP32 support integration. -+-------------------+-------------------+ -| |arduino-logo| | |pio-logo| | -+-------------------+-------------------+ -| Arduino IDE | PlatformIO | -+-------------------+-------------------+ ++-------------------+ +| |arduino-logo| | ++-------------------+ +| Arduino IDE | ++-------------------+ .. |arduino-logo| image:: ../_static/logo_arduino.png -.. |pio-logo| image:: ../_static/logo_pio.png See `Installing Guides `_ for more details on how to install the Arduino ESP32 support. diff --git a/docs/en/installing.rst b/docs/en/installing.rst index ec405b3552c..d5392d4b5ec 100644 --- a/docs/en/installing.rst +++ b/docs/en/installing.rst @@ -63,92 +63,6 @@ To start the installation process using the Boards Manager, follow these steps: - Restart Arduino IDE. -Installing using PlatformIO ---------------------------- - -.. 
figure:: ../_static/logo_pio.png - :align: center - :width: 200 - :figclass: align-center - -PlatformIO is a professional collaborative platform for embedded development. It has out-of-the-box support for ESP32 SoCs and allows working with Arduino ESP32 as well as ESP-IDF from Espressif without changing your development environment. PlatformIO includes lots of instruments for the most common development tasks such as debugging, unit testing, and static code analysis. - -.. warning:: Integration of the Arduino Core ESP32 project in PlatformIO is maintained by PlatformIO developers. Arduino Core ESP32 Project Team cannot support PlatformIO-specific issues. Please report these issues in official `PlatformIO repositories `_. - -A detailed overview of the PlatformIO ecosystem and its philosophy can be found in `the official documentation `_. - -PlatformIO can be used in two flavors: - -- `PlatformIO IDE `_ is a toolset for embedded C/C++ development available on Windows, macOS and Linux platforms - -- `PlatformIO Core (CLI) `_ is a command-line tool that consists of a multi-platform build system, platform and library managers and other integration components. It can be used with a variety of code development environments and allows integration with cloud platforms and web services - -To install PlatformIO, you can follow this Getting Started, provided at `docs.platformio.org`_. - -Using the stable code -********************* - -.. note:: - A detailed overview of supported development boards, examples and frameworks can be found on `the official Espressif32 dev-platform page `_ in the PlatformIO Registry. - -The most reliable and easiest way to get started is to use the latest stable version of the ESP32 development platform that passed all tests/verifications and can be used in production. - -Create a new project and select one of the available boards. You can change after by changing the `platformio.ini `_ file. - -- For ESP32 - -.. code-block:: bash - - [env:esp32dev] - platform = espressif32 - board = esp32dev - framework = arduino - -- For ESP32-S2 (ESP32-S2-Saola-1 board) - -.. code-block:: bash - - [env:esp32-s2-saola-1] - platform = espressif32 - board = esp32-s2-saola-1 - framework = arduino - -- For ESP32-C3 (ESP32-C3-DevKitM-1 board) - -.. code-block:: bash - - [env:esp32-c3-devkitm-1] - platform = espressif32 - board = esp32-c3-devkitm-1 - framework = arduino - -How to update to the latest code -******************************** - -To test the latest Arduino ESP32, you need to change your project *platformio.ini* accordingly. -The following configuration uses the upstream version of the Espressif development platform and the latest Arduino core directly from the Espressif GitHub repository: - -.. code-block:: bash - - [env:esp32-c3-devkitm-1] - platform = https://github.com/platformio/platform-espressif32.git - board = esp32-c3-devkitm-1 - framework = arduino - platform_packages = - framework-arduinoespressif32 @ https://github.com/espressif/arduino-esp32#master - - -To get more information about PlatformIO, see the following links: - -- `PlatformIO Core (CLI) `_ - -- `PlatformIO Home `_ - -- `Tutorials and Examples `_ - -- `Library Management `_ - - Windows (manual installation) ----------------------------- @@ -360,4 +274,3 @@ Where ``~/Documents/Arduino`` represents your sketch book location as per "Ardui - Restart Arduino IDE. .. _Arduino.cc: https://www.arduino.cc/en/Main/Software -.. 
_docs.platformio.org: https://docs.platformio.org/en/latest/integration/ide/pioide.html diff --git a/docs/en/tutorials/blink.rst b/docs/en/tutorials/blink.rst index b5f6a767f8d..f4a53ec945d 100644 --- a/docs/en/tutorials/blink.rst +++ b/docs/en/tutorials/blink.rst @@ -7,7 +7,7 @@ Introduction This is the interactive blink tutorial using `Wokwi`_. For this tutorial, you don't need the ESP32 board or the Arduino toolchain. -.. note:: If you don't want to use this tutorial with the simulation, you can copy and paste the :ref:`blink_example_code` from `Wokwi`_ editor and use it on the `Arduino IDE`_ or `PlatformIO`_. +.. note:: If you don't want to use this tutorial with the simulation, you can copy and paste the :ref:`blink_example_code` from `Wokwi`_ editor and use it on the `Arduino IDE`. About this Tutorial ------------------- @@ -109,5 +109,4 @@ Resources .. _ESP32 Datasheet: https://www.espressif.com/sites/default/files/documentation/esp32_datasheet_en.pdf .. _Wokwi: https://wokwi.com/ -.. _PlatformIO: https://docs.espressif.com/projects/arduino-esp32/en/latest/installing.html#platformio .. _Arduino IDE: https://docs.espressif.com/projects/arduino-esp32/en/latest/installing.html#installing-using-boards-manager diff --git a/docs/utils.sh b/docs/utils.sh index 84f37489975..3a860ac8a2c 100644 --- a/docs/utils.sh +++ b/docs/utils.sh @@ -1,18 +1,19 @@ +#!/bin/bash # Bash helper functions for adding SSH keys -function add_ssh_keys() { - local key_string="${1}" - mkdir -p ~/.ssh - chmod 700 ~/.ssh - echo -n "${key_string}" >~/.ssh/id_rsa_base64 - base64 --decode --ignore-garbage ~/.ssh/id_rsa_base64 >~/.ssh/id_rsa - chmod 600 ~/.ssh/id_rsa +function add_ssh_keys { + local key_string="${1}" + mkdir -p ~/.ssh + chmod 700 ~/.ssh + echo -n "${key_string}" >~/.ssh/id_rsa_base64 + base64 --decode --ignore-garbage ~/.ssh/id_rsa_base64 >~/.ssh/id_rsa + chmod 600 ~/.ssh/id_rsa } -function add_doc_server_ssh_keys() { - local key_string="${1}" - local server_url="${2}" - local server_user="${3}" - add_ssh_keys "${key_string}" - echo -e "Host ${server_url}\n\tStrictHostKeyChecking no\n\tUser ${server_user}\n" >>~/.ssh/config +function add_doc_server_ssh_keys { + local key_string="${1}" + local server_url="${2}" + local server_user="${3}" + add_ssh_keys "${key_string}" + echo -e "Host ${server_url}\n\tStrictHostKeyChecking no\n\tUser ${server_user}\n" >>~/.ssh/config } diff --git a/libraries/BLE/src/BLECharacteristic.cpp b/libraries/BLE/src/BLECharacteristic.cpp index 1d1bafdda1c..b03d524a6a5 100644 --- a/libraries/BLE/src/BLECharacteristic.cpp +++ b/libraries/BLE/src/BLECharacteristic.cpp @@ -279,9 +279,13 @@ void BLECharacteristic::handleGATTServerEvent(esp_gatts_cb_event_t event, esp_ga log_d(" - Response to write event: New value: handle: %.2x, uuid: %s", getHandle(), getUUID().toString().c_str()); +// The call to BLEUtils::buildHexData() doesn't output anything if the log level is not +// "DEBUG". As it is quite CPU intensive, it is much better to not call it if not needed. 
+#if ARDUHAL_LOG_LEVEL >= ARDUHAL_LOG_LEVEL_DEBUG char *pHexData = BLEUtils::buildHexData(nullptr, param->write.value, param->write.len); log_d(" - Data: length: %d, data: %s", param->write.len, pHexData); free(pHexData); +#endif if (param->write.need_rsp) { esp_gatt_rsp_t rsp; @@ -390,9 +394,13 @@ void BLECharacteristic::handleGATTServerEvent(esp_gatts_cb_event_t event, esp_ga rsp.attr_value.handle = param->read.handle; rsp.attr_value.auth_req = ESP_GATT_AUTH_REQ_NONE; +// The call to BLEUtils::buildHexData() doesn't output anything if the log level is not +// "DEBUG". As it is quite CPU intensive, it is much better to not call it if not needed. +#if ARDUHAL_LOG_LEVEL >= ARDUHAL_LOG_LEVEL_DEBUG char *pHexData = BLEUtils::buildHexData(nullptr, rsp.attr_value.value, rsp.attr_value.len); log_d(" - Data: length=%d, data=%s, offset=%d", rsp.attr_value.len, pHexData, rsp.attr_value.offset); free(pHexData); +#endif esp_err_t errRc = ::esp_ble_gatts_send_response(gatts_if, param->read.conn_id, param->read.trans_id, ESP_GATT_OK, &rsp); if (errRc != ESP_OK) { @@ -471,7 +479,20 @@ void BLECharacteristic::notify(bool is_notification) { m_pCallbacks->onNotify(this); // Invoke the notify callback. + // GeneralUtils::hexDump() doesn't output anything if the log level is not + // "VERBOSE". Additionally, it is very CPU intensive, even when it doesn't + // output anything! So it is much better to *not* call it at all if not needed. + // In a simple program which calls BLECharacteristic::notify() every 50 ms, + // the performance gain of this little optimization is 37% in release mode + // (-O3) and 57% in debug mode. + // Of course, the "#if ARDUHAL_LOG_LEVEL >= ARDUHAL_LOG_LEVEL_VERBOSE" guard + // could also be put inside the GeneralUtils::hexDump() function itself. But + // it's better to put it here also, as it is clearer (indicating a verbose log + // thing) and it allows to remove the "m_value.getValue().c_str()" call, which + // is, in itself, quite CPU intensive. +#if ARDUHAL_LOG_LEVEL >= ARDUHAL_LOG_LEVEL_VERBOSE GeneralUtils::hexDump((uint8_t *)m_value.getValue().c_str(), m_value.getValue().length()); +#endif if (getService()->getServer()->getConnectedCount() == 0) { log_v("<< notify: No connected clients."); @@ -624,9 +645,13 @@ void BLECharacteristic::setReadProperty(bool value) { * @param [in] length The length of the data in bytes. */ void BLECharacteristic::setValue(uint8_t *data, size_t length) { +// The call to BLEUtils::buildHexData() doesn't output anything if the log level is not +// "VERBOSE". As it is quite CPU intensive, it is much better to not call it if not needed. +#if ARDUHAL_LOG_LEVEL >= ARDUHAL_LOG_LEVEL_VERBOSE char *pHex = BLEUtils::buildHexData(nullptr, data, length); log_v(">> setValue: length=%d, data=%s, characteristic UUID=%s", length, pHex, getUUID().toString().c_str()); free(pHex); +#endif if (length > ESP_GATT_MAX_ATTR_LEN) { log_e("Size %d too large, must be no bigger than %d", length, ESP_GATT_MAX_ATTR_LEN); return; diff --git a/libraries/ESP32/examples/FreeRTOS/BasicMultiThreading/README.md b/libraries/ESP32/examples/FreeRTOS/BasicMultiThreading/README.md index 7bd44855adc..f48e352dd45 100644 --- a/libraries/ESP32/examples/FreeRTOS/BasicMultiThreading/README.md +++ b/libraries/ESP32/examples/FreeRTOS/BasicMultiThreading/README.md @@ -62,10 +62,6 @@ To get more information about the Espressif boards see [Espressif Development Ki * Before Compile/Verify, select the correct board: `Tools -> Board`. 
* Select the COM port: `Tools -> Port: xxx` where the `xxx` is the detected COM port. -#### Using Platform IO - -* Select the COM port: `Devices` or set the `upload_port` option on the `platformio.ini` file. - ## Troubleshooting ***Important: Make sure you are using a good quality USB cable and that you have a reliable power source*** diff --git a/libraries/ESP32/examples/FreeRTOS/Mutex/README.md b/libraries/ESP32/examples/FreeRTOS/Mutex/README.md index d1c8c19e3be..435528bd771 100644 --- a/libraries/ESP32/examples/FreeRTOS/Mutex/README.md +++ b/libraries/ESP32/examples/FreeRTOS/Mutex/README.md @@ -51,10 +51,6 @@ To get more information about the Espressif boards see [Espressif Development Ki * Before Compile/Verify, select the correct board: `Tools -> Board`. * Select the COM port: `Tools -> Port: xxx` where the `xxx` is the detected COM port. -#### Using Platform IO - -* Select the COM port: `Devices` or set the `upload_port` option on the `platformio.ini` file. - ## Example Log Output The expected output of shared variables protected by mutex demonstrates mutually exclusive access from tasks - they do not interrupt each other and do not rewrite the value before the other task has read it back. diff --git a/libraries/ESP32/examples/FreeRTOS/Queue/README.md b/libraries/ESP32/examples/FreeRTOS/Queue/README.md index 745ce9e8db6..e81d6741e2a 100644 --- a/libraries/ESP32/examples/FreeRTOS/Queue/README.md +++ b/libraries/ESP32/examples/FreeRTOS/Queue/README.md @@ -29,10 +29,6 @@ To get more information about the Espressif boards see [Espressif Development Ki * Before Compile/Verify, select the correct board: `Tools -> Board`. * Select the COM port: `Tools -> Port: xxx` where the `xxx` is the detected COM port. -#### Using Platform IO - -* Select the COM port: `Devices` or set the `upload_port` option on the `platformio.ini` file. - ## Example Log Output ``` diff --git a/libraries/ESP32/examples/FreeRTOS/Semaphore/README.md b/libraries/ESP32/examples/FreeRTOS/Semaphore/README.md index 8f860a52db5..fcb38eed1d6 100644 --- a/libraries/ESP32/examples/FreeRTOS/Semaphore/README.md +++ b/libraries/ESP32/examples/FreeRTOS/Semaphore/README.md @@ -35,10 +35,6 @@ To get more information about the Espressif boards see [Espressif Development Ki * Before Compile/Verify, select the correct board: `Tools -> Board`. * Select the COM port: `Tools -> Port: xxx` where the `xxx` is the detected COM port. -#### Using Platform IO - -* Select the COM port: `Devices` or set the `upload_port` option on the `platformio.ini` file. - ## Example Log Output ``` diff --git a/libraries/ESP32/examples/RMT/Legacy_RMT_Driver_Compatible/Legacy_RMT_Driver_Compatible.ino b/libraries/ESP32/examples/RMT/Legacy_RMT_Driver_Compatible/Legacy_RMT_Driver_Compatible.ino index 5744cf884a7..b42fe15f0cd 100644 --- a/libraries/ESP32/examples/RMT/Legacy_RMT_Driver_Compatible/Legacy_RMT_Driver_Compatible.ino +++ b/libraries/ESP32/examples/RMT/Legacy_RMT_Driver_Compatible/Legacy_RMT_Driver_Compatible.ino @@ -12,7 +12,6 @@ // add the file "build_opt.h" to your Arduino project folder with "-DESP32_ARDUINO_NO_RGB_BUILTIN" to use the RMT Legacy driver #error "ESP32_ARDUINO_NO_RGB_BUILTIN is not defined, this example is intended to demonstrate the RMT Legacy driver." #error "Please add the file 'build_opt.h' with '-DESP32_ARDUINO_NO_RGB_BUILTIN' to your Arduino project folder." 
-#error "Another way to disable the RGB_BUILTIN is to define it in the platformio.ini file, for instance: '-D ESP32_ARDUINO_NO_RGB_BUILTIN'" #else diff --git a/libraries/ESP32/examples/Template/ExampleTemplate/README.md b/libraries/ESP32/examples/Template/ExampleTemplate/README.md index f5aa7b35e86..91b50967e9e 100644 --- a/libraries/ESP32/examples/Template/ExampleTemplate/README.md +++ b/libraries/ESP32/examples/Template/ExampleTemplate/README.md @@ -64,10 +64,6 @@ To get more information about the Espressif boards see [Espressif Development Ki * Before Compile/Verify, select the correct board: `Tools -> Board`. * Select the COM port: `Tools -> Port: xxx` where the `xxx` is the detected COM port. -#### Using Platform IO - -* Select the COM port: `Devices` or set the `upload_port` option on the `platformio.ini` file. - ## Example/Log Output ==(OPTIONAL)== ==*Add the log/serial output here!*== diff --git a/libraries/LittleFS/examples/LITTLEFS_PlatformIO/.gitignore b/libraries/LittleFS/examples/LITTLEFS_PlatformIO/.gitignore deleted file mode 100644 index 653e92272d5..00000000000 --- a/libraries/LittleFS/examples/LITTLEFS_PlatformIO/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -.pio -.vscode -mklittlefs.exe -mklittlefs diff --git a/libraries/LittleFS/examples/LITTLEFS_PlatformIO/README.md b/libraries/LittleFS/examples/LITTLEFS_PlatformIO/README.md deleted file mode 100644 index beed34e92f1..00000000000 --- a/libraries/LittleFS/examples/LITTLEFS_PlatformIO/README.md +++ /dev/null @@ -1,68 +0,0 @@ -# How to run on PlatformIO IDE - -- Download and extract to this project root a **mklittlefs** executable for your OS [from a zipped binary here](https://github.com/earlephilhower/mklittlefs/releases) -- Open **LITTLEFS_PlatformIO** folder -- Run PlatformIO project task: **Upload Filesystem Image** -- Run PlatformIO project task: **Upload and Monitor** -- You will see a Serial output like: -``` ---- Miniterm on COM5 115200,8,N,1 --- ---- Quit: Ctrl+C | Menu: Ctrl+T | Help: Ctrl+T followed by Ctrl+H --- -ets Jun 8 2016 00:22:57 - -rst:0x1 (POWERON_RESET),boot:0x13 (Snfigsip: 0, SPIWP:0xee -clk_drv:0x00,q_drv:0x00,d_drv:0x00,cs0_drv:0x00,hd_drv:0x00,wp_drv:0x00 -mode:DIO, clock div:2 -load:0x3fff0018,len:4 -load:0x3fff001c,len:1044 -load:0x40078000,len:10044 -load:0x40080400,len:5872 -entry 0x400806ac -Listing directory: / - FILE: /file1.txt SIZE: 3 LAST WRITE: 2020-10-06 15:10:33 - DIR : /testfolder LAST WRITE: 2020-10-06 15:10:33 -Creating Dir: /mydir -Dir created -Writing file: /mydir/hello2.txt -- file written -Listing directory: / - FILE: /file1.txt SIZE: 3 LAST WRITE: 2020-10-06 15:10:33 - DIR : /mydir LAST WRITE: 1970-01-01 00:00:00 -Listing directory: /mydir - FILE: /mydir/hello2.txt SIZE: 6 LAST WRITE: 1970-01-01 00:00:00 - DIR : /testfolder LAST WRITE: 2020-10-06 15:10:33 -Listing directory: /testfolder - FILE: /testfolder/test2.txt SIZE: 3 LAST WRITE: 2020-10-06 15:10:33 -Deleting file: /mydir/hello2.txt -- file deleted -Removing Dir: /mydir -Dir removed -Listing directory: / - FILE: /file1.txt SIZE: 3 LAST WRITE: 2020-10-06 15:10:33 - DIR : /testfolder LAST WRITE: 2020-10-06 15:10:33 -Listing directory: /testfolder - FILE: /testfolder/test2.txt SIZE: 3 LAST WRITE: 2020-10-06 15:10:33 -Writing file: /hello.txt -- file written -Appending to file: /hello.txt -- message appended -Reading file: /hello.txt -- read from file: -Hello World! -Renaming file /hello.txt to /foo.txt -- file renamed -Reading file: /foo.txt -- read from file: -Hello World! 
-Deleting file: /foo.txt -- file deleted -Testing file I/O with /test.txt -- writing................................................................ - - 1048576 bytes written in 12006 ms -- reading................................................................ -- 1048576 bytes read in 547 ms -Deleting file: /test.txt -- file deleted -Test complete -``` -- If you have a module with more than 4 MB flash, you can uncomment **partitions_custom.csv** in **platformio.ini** and modify the csv file accordingly diff --git a/libraries/LittleFS/examples/LITTLEFS_PlatformIO/data/file1.txt b/libraries/LittleFS/examples/LITTLEFS_PlatformIO/data/file1.txt deleted file mode 100644 index 72943a16fb2..00000000000 --- a/libraries/LittleFS/examples/LITTLEFS_PlatformIO/data/file1.txt +++ /dev/null @@ -1 +0,0 @@ -aaa diff --git a/libraries/LittleFS/examples/LITTLEFS_PlatformIO/data/testfolder/test2.txt b/libraries/LittleFS/examples/LITTLEFS_PlatformIO/data/testfolder/test2.txt deleted file mode 100644 index f761ec192d9..00000000000 --- a/libraries/LittleFS/examples/LITTLEFS_PlatformIO/data/testfolder/test2.txt +++ /dev/null @@ -1 +0,0 @@ -bbb diff --git a/libraries/LittleFS/examples/LITTLEFS_PlatformIO/include/.placeholder.txt b/libraries/LittleFS/examples/LITTLEFS_PlatformIO/include/.placeholder.txt deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/libraries/LittleFS/examples/LITTLEFS_PlatformIO/lib/.placeholder.txt b/libraries/LittleFS/examples/LITTLEFS_PlatformIO/lib/.placeholder.txt deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/libraries/LittleFS/examples/LITTLEFS_PlatformIO/partitions_custom.csv b/libraries/LittleFS/examples/LITTLEFS_PlatformIO/partitions_custom.csv deleted file mode 100644 index 97846fa59bb..00000000000 --- a/libraries/LittleFS/examples/LITTLEFS_PlatformIO/partitions_custom.csv +++ /dev/null @@ -1,6 +0,0 @@ -# Name, Type, SubType, Offset, Size, Flags -ota_0, app, ota_0, 0x10000, 0x1A0000, -ota_1, app, ota_1, , 0x1A0000, -otadata, data, ota, 0x350000, 0x2000, -nvs, data, nvs, , 0x6000, -data, data, spiffs, , 0xA8000, diff --git a/libraries/LittleFS/examples/LITTLEFS_PlatformIO/platformio.ini b/libraries/LittleFS/examples/LITTLEFS_PlatformIO/platformio.ini deleted file mode 100644 index dce1ac84456..00000000000 --- a/libraries/LittleFS/examples/LITTLEFS_PlatformIO/platformio.ini +++ /dev/null @@ -1,22 +0,0 @@ -; PlatformIO Project Configuration File -; -; Build options: build flags, source filter -; Upload options: custom upload port, speed and extra flags -; Library options: dependencies, extra library storages -; Advanced options: extra scripting -; -; Please visit documentation for the other options and examples -; https://docs.platformio.org/page/projectconf.html - -[platformio] -default_envs = esp32 - -[env] -framework = arduino - -[env:esp32] -platform = espressif32 -board = esp32dev -board_build.partitions = partitions_custom.csv -monitor_filters = esp32_exception_decoder -monitor_speed = 115200 diff --git a/libraries/LittleFS/examples/LITTLEFS_PlatformIO/src/main.cpp b/libraries/LittleFS/examples/LITTLEFS_PlatformIO/src/main.cpp deleted file mode 100644 index 5ae9e8d7dfc..00000000000 --- a/libraries/LittleFS/examples/LITTLEFS_PlatformIO/src/main.cpp +++ /dev/null @@ -1,286 +0,0 @@ -#include -#include "FS.h" -#include -#include - -/* You only need to format LittleFS the first time you run a - test or else use the LITTLEFS plugin to create a partition - https://github.com/lorol/arduino-esp32littlefs-plugin */ - -#define 
FORMAT_LITTLEFS_IF_FAILED true - -void listDir(fs::FS &fs, const char *dirname, uint8_t levels) { - Serial.printf("Listing directory: %s\r\n", dirname); - - File root = fs.open(dirname); - if (!root) { - Serial.println("- failed to open directory"); - return; - } - if (!root.isDirectory()) { - Serial.println(" - not a directory"); - return; - } - - File file = root.openNextFile(); - while (file) { - if (file.isDirectory()) { - Serial.print(" DIR : "); - - Serial.print(file.name()); - time_t t = file.getLastWrite(); - struct tm *tmstruct = localtime(&t); - Serial.printf( - " LAST WRITE: %d-%02d-%02d %02d:%02d:%02d\n", (tmstruct->tm_year) + 1900, (tmstruct->tm_mon) + 1, tmstruct->tm_mday, tmstruct->tm_hour, - tmstruct->tm_min, tmstruct->tm_sec - ); - - if (levels) { - listDir(fs, file.name(), levels - 1); - } - } else { - Serial.print(" FILE: "); - Serial.print(file.name()); - Serial.print(" SIZE: "); - - Serial.print(file.size()); - time_t t = file.getLastWrite(); - struct tm *tmstruct = localtime(&t); - Serial.printf( - " LAST WRITE: %d-%02d-%02d %02d:%02d:%02d\n", (tmstruct->tm_year) + 1900, (tmstruct->tm_mon) + 1, tmstruct->tm_mday, tmstruct->tm_hour, - tmstruct->tm_min, tmstruct->tm_sec - ); - } - file = root.openNextFile(); - } -} - -void createDir(fs::FS &fs, const char *path) { - Serial.printf("Creating Dir: %s\n", path); - if (fs.mkdir(path)) { - Serial.println("Dir created"); - } else { - Serial.println("mkdir failed"); - } -} - -void removeDir(fs::FS &fs, const char *path) { - Serial.printf("Removing Dir: %s\n", path); - if (fs.rmdir(path)) { - Serial.println("Dir removed"); - } else { - Serial.println("rmdir failed"); - } -} - -void readFile(fs::FS &fs, const char *path) { - Serial.printf("Reading file: %s\r\n", path); - - File file = fs.open(path); - if (!file || file.isDirectory()) { - Serial.println("- failed to open file for reading"); - return; - } - - Serial.println("- read from file:"); - while (file.available()) { - Serial.write(file.read()); - } - file.close(); -} - -void writeFile(fs::FS &fs, const char *path, const char *message) { - Serial.printf("Writing file: %s\r\n", path); - - File file = fs.open(path, FILE_WRITE); - if (!file) { - Serial.println("- failed to open file for writing"); - return; - } - if (file.print(message)) { - Serial.println("- file written"); - } else { - Serial.println("- write failed"); - } - file.close(); -} - -void appendFile(fs::FS &fs, const char *path, const char *message) { - Serial.printf("Appending to file: %s\r\n", path); - - File file = fs.open(path, FILE_APPEND); - if (!file) { - Serial.println("- failed to open file for appending"); - return; - } - if (file.print(message)) { - Serial.println("- message appended"); - } else { - Serial.println("- append failed"); - } - file.close(); -} - -void renameFile(fs::FS &fs, const char *path1, const char *path2) { - Serial.printf("Renaming file %s to %s\r\n", path1, path2); - if (fs.rename(path1, path2)) { - Serial.println("- file renamed"); - } else { - Serial.println("- rename failed"); - } -} - -void deleteFile(fs::FS &fs, const char *path) { - Serial.printf("Deleting file: %s\r\n", path); - if (fs.remove(path)) { - Serial.println("- file deleted"); - } else { - Serial.println("- delete failed"); - } -} - -// SPIFFS-like write and delete file - -// See: https://github.com/esp8266/Arduino/blob/master/libraries/LittleFS/src/LittleFS.cpp#L60 -void writeFile2(fs::FS &fs, const char *path, const char *message) { - if (!fs.exists(path)) { - if (strchr(path, '/')) { - 
Serial.printf("Create missing folders of: %s\r\n", path); - char *pathStr = strdup(path); - if (pathStr) { - char *ptr = strchr(pathStr, '/'); - while (ptr) { - *ptr = 0; - fs.mkdir(pathStr); - *ptr = '/'; - ptr = strchr(ptr + 1, '/'); - } - } - free(pathStr); - } - } - - Serial.printf("Writing file to: %s\r\n", path); - File file = fs.open(path, FILE_WRITE); - if (!file) { - Serial.println("- failed to open file for writing"); - return; - } - if (file.print(message)) { - Serial.println("- file written"); - } else { - Serial.println("- write failed"); - } - file.close(); -} - -// See: https://github.com/esp8266/Arduino/blob/master/libraries/LittleFS/src/LittleFS.h#L149 -void deleteFile2(fs::FS &fs, const char *path) { - Serial.printf("Deleting file and empty folders on path: %s\r\n", path); - - if (fs.remove(path)) { - Serial.println("- file deleted"); - } else { - Serial.println("- delete failed"); - } - - char *pathStr = strdup(path); - if (pathStr) { - char *ptr = strrchr(pathStr, '/'); - if (ptr) { - Serial.printf("Removing all empty folders on path: %s\r\n", path); - } - while (ptr) { - *ptr = 0; - fs.rmdir(pathStr); - ptr = strrchr(pathStr, '/'); - } - free(pathStr); - } -} - -void testFileIO(fs::FS &fs, const char *path) { - Serial.printf("Testing file I/O with %s\r\n", path); - - static uint8_t buf[512]; - size_t len = 0; - File file = fs.open(path, FILE_WRITE); - if (!file) { - Serial.println("- failed to open file for writing"); - return; - } - - size_t i; - Serial.print("- writing"); - uint32_t start = millis(); - for (i = 0; i < 2048; i++) { - if ((i & 0x001F) == 0x001F) { - Serial.print("."); - } - file.write(buf, 512); - } - Serial.println(""); - uint32_t end = millis() - start; - Serial.printf(" - %u bytes written in %u ms\r\n", 2048 * 512, end); - file.close(); - - file = fs.open(path); - start = millis(); - end = start; - i = 0; - if (file && !file.isDirectory()) { - len = file.size(); - size_t flen = len; - start = millis(); - Serial.print("- reading"); - while (len) { - size_t toRead = len; - if (toRead > 512) { - toRead = 512; - } - file.read(buf, toRead); - if ((i++ & 0x001F) == 0x001F) { - Serial.print("."); - } - len -= toRead; - } - Serial.println(""); - end = millis() - start; - Serial.printf("- %u bytes read in %u ms\r\n", flen, end); - file.close(); - } else { - Serial.println("- failed to open file for reading"); - } -} - -void setup() { - Serial.begin(115200); - if (!LittleFS.begin(FORMAT_LITTLEFS_IF_FAILED)) { - Serial.println("LittleFS Mount Failed"); - return; - } - - listDir(LittleFS, "/", 0); - createDir(LittleFS, "/mydir"); - writeFile(LittleFS, "/mydir/hello2.txt", "Hello2"); - //writeFile(LittleFS, "/mydir/newdir2/newdir3/hello3.txt", "Hello3"); - writeFile2(LittleFS, "/mydir/newdir2/newdir3/hello3.txt", "Hello3"); - listDir(LittleFS, "/", 3); - deleteFile(LittleFS, "/mydir/hello2.txt"); - //deleteFile(LittleFS, "/mydir/newdir2/newdir3/hello3.txt"); - deleteFile2(LittleFS, "/mydir/newdir2/newdir3/hello3.txt"); - removeDir(LittleFS, "/mydir"); - listDir(LittleFS, "/", 3); - writeFile(LittleFS, "/hello.txt", "Hello "); - appendFile(LittleFS, "/hello.txt", "World!\r\n"); - readFile(LittleFS, "/hello.txt"); - renameFile(LittleFS, "/hello.txt", "/foo.txt"); - readFile(LittleFS, "/foo.txt"); - deleteFile(LittleFS, "/foo.txt"); - testFileIO(LittleFS, "/test.txt"); - deleteFile(LittleFS, "/test.txt"); - - Serial.println("Test complete"); -} - -void loop() {} diff --git a/libraries/NetworkClientSecure/README.md b/libraries/NetworkClientSecure/README.md 
index d028158730d..f83cf246287 100644 --- a/libraries/NetworkClientSecure/README.md +++ b/libraries/NetworkClientSecure/README.md @@ -32,25 +32,11 @@ This method is similar to the single root certificate verification above, but it root certificates from Mozilla to authenticate against, while the previous method only accepts a single certificate for a given server. This allows the client to connect to all public SSL servers. -To use this feature in PlatformIO: -1. create a certificate bundle as described in the document below, or obtain a pre-built one you trust: -https://docs.espressif.com/projects/esp-idf/en/latest/esp32/api-reference/protocols/esp_crt_bundle.html -(gen_crt_bundle.py can be found in the /tools folder) - a. note: the full bundle will take up around 64k of flash space, but has minimal RAM usage, as only - the index of the certificates is kept in RAM -2. Place the bundle under the file name "data/cert/x509_crt_bundle.bin" in your platformio project -3. add "board_build.embed_files = data/cert/x509_crt_bundle.bin" in your platformio.ini -4. add the following global declaration in your project: - extern const uint8_t rootca_crt_bundle_start[] asm("_binary_data_cert_x509_crt_bundle_bin_start"); -5. before initiating the first SSL connection, call - my_client.setCACertBundle(rootca_crt_bundle_start); - To use this feature in Arduino IDE: If the Arduino IDE added support for embedding files in the meantime, then follow the instructions above. -If not, you have three choices: -1. convert your project to PlatformIO -2. create a makefile where you can add the idf_component_register() declaration to include the certificate bundle -3. Store the bundle as a SPIFFS file, but then you have to load it into RAM in runtime and waste 64k of precious memory +If not, you have two choices: +1. create a makefile where you can add the idf_component_register() declaration to include the certificate bundle +2. Store the bundle as a SPIFFS file, but then you have to load it into RAM in runtime and waste 64k of precious memory Using a root CA cert and client cert/keys ----------------------------------------- diff --git a/libraries/WebServer/examples/MultiHomedServers/README.md b/libraries/WebServer/examples/MultiHomedServers/README.md index 83ec6223850..04b96dfbd53 100644 --- a/libraries/WebServer/examples/MultiHomedServers/README.md +++ b/libraries/WebServer/examples/MultiHomedServers/README.md @@ -67,10 +67,6 @@ To get more information about the Espressif boards see [Espressif Development Ki * Before Compile/Verify, select the correct board: `Tools -> Board`. * Select the COM port: `Tools -> Port: xxx` where the `xxx` is the detected COM port. -#### Using Platform IO - -* Select the COM port: `Devices` or set the `upload_port` option on the `platformio.ini` file. - ## Example Log Output ``` diff --git a/libraries/WiFi/examples/FTM/FTM_Initiator/README.md b/libraries/WiFi/examples/FTM/FTM_Initiator/README.md index 3558f75d372..b9c7f8d438d 100644 --- a/libraries/WiFi/examples/FTM/FTM_Initiator/README.md +++ b/libraries/WiFi/examples/FTM/FTM_Initiator/README.md @@ -55,10 +55,6 @@ To get more information about the Espressif boards see [Espressif Development Ki * Before Compile/Verify, select the correct board: `Tools -> Board`. * Select the COM port: `Tools -> Port: xxx` where the `xxx` is the detected COM port. -#### Using Platform IO - -* Select the COM port: `Devices` or setting the `upload_port` option on the `platformio.ini` file. 
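The NetworkClientSecure README change above keeps the certificate-bundle workflow but drops the PlatformIO embedding steps. As a rough illustration of how the remaining pieces fit together, the sketch below reuses the extern declaration and the one-argument setCACertBundle() call quoted in that README; the embedded symbol name follows the removed data/cert/x509_crt_bundle.bin instructions and must match however the bundle is actually embedded, and the Wi-Fi credentials and target host are placeholders.

```cpp
// Illustrative only: assumes the certificate bundle was embedded by one of the
// approaches listed in the NetworkClientSecure README; the symbol name below
// comes from embedding data/cert/x509_crt_bundle.bin and may differ in your setup.
#include <WiFi.h>
#include <NetworkClientSecure.h>

extern const uint8_t rootca_crt_bundle_start[] asm("_binary_data_cert_x509_crt_bundle_bin_start");

NetworkClientSecure client;

void setup() {
  Serial.begin(115200);
  WiFi.begin("your-ssid", "your-password");  // placeholder credentials
  while (WiFi.status() != WL_CONNECTED) {
    delay(500);
  }

  // Attach the embedded bundle before the first TLS connection, as the README
  // describes. Check the signature against your installed core; some versions
  // also accept the bundle size as a second argument.
  client.setCACertBundle(rootca_crt_bundle_start);

  if (client.connect("www.example.com", 443)) {  // placeholder host
    Serial.println("TLS connection established against the bundled root set");
    client.stop();
  } else {
    Serial.println("TLS connection failed");
  }
}

void loop() {}
```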
- ## Log Output Expected log output: diff --git a/libraries/WiFi/examples/FTM/FTM_Responder/README.md b/libraries/WiFi/examples/FTM/FTM_Responder/README.md index feede0867f3..fdcf1ab921b 100644 --- a/libraries/WiFi/examples/FTM/FTM_Responder/README.md +++ b/libraries/WiFi/examples/FTM/FTM_Responder/README.md @@ -48,10 +48,6 @@ To get more information about the Espressif boards see [Espressif Development Ki * Before Compile/Verify, select the correct board: `Tools -> Board`. * Select the COM port: `Tools -> Port: xxx` where the `xxx` is the detected COM port. -#### Using Platform IO - -* Select the COM port: `Devices` or setting the `upload_port` option on the `platformio.ini` file. - ## Log Output Expected log output: diff --git a/libraries/WiFi/examples/WiFiClient/README.md b/libraries/WiFi/examples/WiFiClient/README.md index 8b6a5d9caeb..9d3698a543a 100644 --- a/libraries/WiFi/examples/WiFiClient/README.md +++ b/libraries/WiFi/examples/WiFiClient/README.md @@ -61,10 +61,6 @@ To get more information about the Espressif boards see [Espressif Development Ki * Before Compile/Verify, select the correct board: `Tools -> Board`. * Select the COM port: `Tools -> Port: xxx` where the `xxx` is the detected COM port. -#### Using Platform IO - -* Select the COM port: `Devices` or set the `upload_port` option on the `platformio.ini` file. - ## Example Log Output The initial output which is common for all examples can be ignored: diff --git a/libraries/WiFi/examples/WiFiClientConnect/README.md b/libraries/WiFi/examples/WiFiClientConnect/README.md index eab02b674ff..939d44c5b76 100644 --- a/libraries/WiFi/examples/WiFiClientConnect/README.md +++ b/libraries/WiFi/examples/WiFiClientConnect/README.md @@ -18,10 +18,6 @@ Currently, this example supports the following targets. * Before Compile/Verify, select the correct board: `Tools -> Board`. * Select the COM port: `Tools -> Port: xxx` where the `xxx` is the detected COM port. -#### Using Platform IO - -* Select the COM port: `Devices` or set the `upload_port`` option on the `platformio.ini` file. - ## Example/Log Output ``` diff --git a/libraries/WiFi/examples/WiFiScan/README.md b/libraries/WiFi/examples/WiFiScan/README.md index ec39cc6c639..f1268f21b5c 100644 --- a/libraries/WiFi/examples/WiFiScan/README.md +++ b/libraries/WiFi/examples/WiFiScan/README.md @@ -18,10 +18,6 @@ Currently, this example supports the following targets. * Before Compile/Verify, select the correct board: `Tools -> Board`. * Select the COM port: `Tools -> Port: xxx` where the `xxx` is the detected COM port. -#### Using Platform IO - -* Select the COM port: `Devices` or setting the `upload_port` option on the `platformio.ini` file. - ## Example/Log Output ``` diff --git a/libraries/WiFi/examples/WiFiScanAsync/README.md b/libraries/WiFi/examples/WiFiScanAsync/README.md index a557173c10f..26120aaa31c 100644 --- a/libraries/WiFi/examples/WiFiScanAsync/README.md +++ b/libraries/WiFi/examples/WiFiScanAsync/README.md @@ -18,10 +18,6 @@ Currently, this example supports the following targets. * Before Compile/Verify, select the correct board: `Tools -> Board`. * Select the COM port: `Tools -> Port: xxx` where the `xxx` is the detected COM port. -#### Using Platform IO - -* Select the COM port: `Devices` or setting the `upload_port` option on the `platformio.ini` file. 
- ## Example/Log Output ``` diff --git a/libraries/WiFi/examples/WiFiScanDualAntenna/README.md b/libraries/WiFi/examples/WiFiScanDualAntenna/README.md index f7ec7cc3ef9..9a6611149d0 100644 --- a/libraries/WiFi/examples/WiFiScanDualAntenna/README.md +++ b/libraries/WiFi/examples/WiFiScanDualAntenna/README.md @@ -17,10 +17,6 @@ This example is compatible with the ESP32-WROOM-DA. * Before Compile/Verify, select the correct board: `Tools -> Board`. * Select the COM port: `Tools -> Port: xxx` where the `xxx` is the detected COM port. -#### Using Platform IO - -* Select the COM port: `Devices` or set the `upload_port` option on the `platformio.ini` file. - ## Example/Log Output ``` diff --git a/libraries/WiFi/examples/WiFiScanTime/README.md b/libraries/WiFi/examples/WiFiScanTime/README.md index f56ba893925..7be0e05d4fe 100644 --- a/libraries/WiFi/examples/WiFiScanTime/README.md +++ b/libraries/WiFi/examples/WiFiScanTime/README.md @@ -18,10 +18,6 @@ Currently, this example supports the following targets. * Before Compile/Verify, select the correct board: `Tools -> Board`. * Select the COM port: `Tools -> Port: xxx` where the `xxx` is the detected COM port. -#### Using Platform IO - -* Select the COM port: `Devices` or setting the `upload_port` option on the `platformio.ini` file. - ## Example/Log Output ``` diff --git a/libraries/Zigbee/examples/Zigbee_Color_Dimmer_Switch/Zigbee_Color_Dimmer_Switch.ino b/libraries/Zigbee/examples/Zigbee_Color_Dimmer_Switch/Zigbee_Color_Dimmer_Switch.ino index 00e3cd170ef..15e120a1dbd 100644 --- a/libraries/Zigbee/examples/Zigbee_Color_Dimmer_Switch/Zigbee_Color_Dimmer_Switch.ino +++ b/libraries/Zigbee/examples/Zigbee_Color_Dimmer_Switch/Zigbee_Color_Dimmer_Switch.ino @@ -145,6 +145,6 @@ void loop() { static uint32_t last_print = 0; if (millis() - last_print > 30000) { last_print = millis(); - zbSwitch.printBoundDevices(); + zbSwitch.printBoundDevices(Serial); } } diff --git a/libraries/Zigbee/examples/Zigbee_On_Off_Switch/Zigbee_On_Off_Switch.ino b/libraries/Zigbee/examples/Zigbee_On_Off_Switch/Zigbee_On_Off_Switch.ino index e38734a052a..e12b8aaf9ea 100644 --- a/libraries/Zigbee/examples/Zigbee_On_Off_Switch/Zigbee_On_Off_Switch.ino +++ b/libraries/Zigbee/examples/Zigbee_On_Off_Switch/Zigbee_On_Off_Switch.ino @@ -138,17 +138,16 @@ void setup() { delay(500); } - // Optional: read manufacturer and model name from the bound light + // Optional: List all bound devices and read manufacturer and model name std::list boundLights = zbSwitch.getBoundDevices(); - //List all bound lights for (const auto &device : boundLights) { - Serial.printf("Device on endpoint %d, short address: 0x%x\n", device->endpoint, device->short_addr); + Serial.printf("Device on endpoint %d, short address: 0x%x\r\n", device->endpoint, device->short_addr); Serial.printf( - "IEEE Address: %02X:%02X:%02X:%02X:%02X:%02X:%02X:%02X\n", device->ieee_addr[0], device->ieee_addr[1], device->ieee_addr[2], device->ieee_addr[3], - device->ieee_addr[4], device->ieee_addr[5], device->ieee_addr[6], device->ieee_addr[7] + "IEEE Address: %02X:%02X:%02X:%02X:%02X:%02X:%02X:%02X\r\n", device->ieee_addr[7], device->ieee_addr[6], device->ieee_addr[5], device->ieee_addr[4], + device->ieee_addr[3], device->ieee_addr[2], device->ieee_addr[1], device->ieee_addr[0] ); - Serial.printf("Light manufacturer: %s", zbSwitch.readManufacturer(device->endpoint, device->short_addr)); - Serial.printf("Light model: %s", zbSwitch.readModel(device->endpoint, device->short_addr)); + Serial.printf("Light manufacturer: %s\r\n", 
zbSwitch.readManufacturer(device->endpoint, device->short_addr, device->ieee_addr)); + Serial.printf("Light model: %s\r\n", zbSwitch.readModel(device->endpoint, device->short_addr, device->ieee_addr)); } Serial.println(); @@ -191,6 +190,6 @@ void loop() { static uint32_t lastPrint = 0; if (millis() - lastPrint > 10000) { lastPrint = millis(); - zbSwitch.printBoundDevices(); + zbSwitch.printBoundDevices(Serial); } } diff --git a/libraries/Zigbee/examples/Zigbee_Temp_Hum_Sensor_Sleepy/Zigbee_Temp_Hum_Sensor_Sleepy.ino b/libraries/Zigbee/examples/Zigbee_Temp_Hum_Sensor_Sleepy/Zigbee_Temp_Hum_Sensor_Sleepy.ino index e13e6a877a6..0c2f9b56690 100644 --- a/libraries/Zigbee/examples/Zigbee_Temp_Hum_Sensor_Sleepy/Zigbee_Temp_Hum_Sensor_Sleepy.ino +++ b/libraries/Zigbee/examples/Zigbee_Temp_Hum_Sensor_Sleepy/Zigbee_Temp_Hum_Sensor_Sleepy.ino @@ -128,6 +128,7 @@ void loop() { // If key pressed for more than 3secs, factory reset Zigbee and reboot Serial.println("Resetting Zigbee to factory and rebooting in 1s."); delay(1000); + Zigbee.factoryReset(); } } } diff --git a/libraries/Zigbee/src/ZigbeeCore.cpp b/libraries/Zigbee/src/ZigbeeCore.cpp index 442dc4b7ee0..b1dc6211362 100644 --- a/libraries/Zigbee/src/ZigbeeCore.cpp +++ b/libraries/Zigbee/src/ZigbeeCore.cpp @@ -243,6 +243,7 @@ void esp_zb_app_signal_handler(esp_zb_app_signal_t *signal_struct) { } else { Zigbee._connected = true; } + Zigbee.searchBindings(); } } else { /* commissioning failed */ @@ -309,7 +310,6 @@ void esp_zb_app_signal_handler(esp_zb_app_signal_t *signal_struct) { Bit 6 – Security capability Bit 7 – Reserved */ - // for each endpoint in the list call the findEndpoint function if not bounded or allowed to bind multiple devices for (std::list::iterator it = Zigbee.ep_objects.begin(); it != Zigbee.ep_objects.end(); ++it) { if (!(*it)->bound() || (*it)->epAllowMultipleBinding()) { @@ -329,6 +329,12 @@ void esp_zb_app_signal_handler(esp_zb_app_signal_t *signal_struct) { } } break; + case ESP_ZB_ZDO_SIGNAL_LEAVE: // End Device + Router + // Device was removed from the network, factory reset the device + if ((zigbee_role_t)Zigbee.getRole() != ZIGBEE_COORDINATOR) { + Zigbee.factoryReset(); + } + break; default: log_v("ZDO signal: %s (0x%x), status: %s", esp_zb_zdo_signal_to_string(sig_type), sig_type, esp_err_to_name(err_status)); break; } } @@ -391,6 +397,75 @@ void ZigbeeCore::scanDelete() { _scan_status = ZB_SCAN_FAILED; } +// Recall bounded devices from the binding table after reboot +void ZigbeeCore::bindingTableCb(const esp_zb_zdo_binding_table_info_t *table_info, void *user_ctx) { + bool done = true; + esp_zb_zdo_mgmt_bind_param_t *req = (esp_zb_zdo_mgmt_bind_param_t *)user_ctx; + esp_zb_zdp_status_t zdo_status = (esp_zb_zdp_status_t)table_info->status; + log_d("Binding table callback for address 0x%04x with status %d", req->dst_addr, zdo_status); + if (zdo_status == ESP_ZB_ZDP_STATUS_SUCCESS) { + // Print binding table log simple + log_d("Binding table info: total %d, index %d, count %d", table_info->total, table_info->index, table_info->count); + + if (table_info->total == 0) { + log_d("No binding table entries found"); + free(req); + return; + } + + esp_zb_zdo_binding_table_record_t *record = table_info->record; + for (int i = 0; i < table_info->count; i++) { + log_d( + "Binding table record: src_endp %d, dst_endp %d, cluster_id 0x%04x, dst_addr_mode %d", record->src_endp, record->dst_endp, record->cluster_id, + record->dst_addr_mode + ); + + zb_device_params_t *device = (zb_device_params_t *)calloc(1, 
sizeof(zb_device_params_t)); + device->endpoint = record->dst_endp; + if (record->dst_addr_mode == ESP_ZB_APS_ADDR_MODE_16_ENDP_PRESENT || record->dst_addr_mode == ESP_ZB_APS_ADDR_MODE_16_GROUP_ENDP_NOT_PRESENT) { + device->short_addr = record->dst_address.addr_short; + } else { //ESP_ZB_APS_ADDR_MODE_64_ENDP_PRESENT + memcpy(device->ieee_addr, record->dst_address.addr_long, sizeof(esp_zb_ieee_addr_t)); + } + + // Add to list of bound devices of proper endpoint + for (std::list::iterator it = Zigbee.ep_objects.begin(); it != Zigbee.ep_objects.end(); ++it) { + if ((*it)->getEndpoint() == record->src_endp) { + (*it)->addBoundDevice(device); + log_d( + "Device bound to EP %d -> device endpoint: %d, short addr: 0x%04x, ieee addr: %02X:%02X:%02X:%02X:%02X:%02X:%02X:%02X", record->src_endp, + device->endpoint, device->short_addr, device->ieee_addr[7], device->ieee_addr[6], device->ieee_addr[5], device->ieee_addr[4], device->ieee_addr[3], + device->ieee_addr[2], device->ieee_addr[1], device->ieee_addr[0] + ); + } + } + record = record->next; + } + + // Continue reading the binding table + if (table_info->index + table_info->count < table_info->total) { + /* There are unreported binding table entries, request for them. */ + req->start_index = table_info->index + table_info->count; + esp_zb_zdo_binding_table_req(req, bindingTableCb, req); + done = false; + } + } + + if (done) { + // Print bound devices + log_d("Filling bounded devices finished"); + free(req); + } +} + +void ZigbeeCore::searchBindings() { + esp_zb_zdo_mgmt_bind_param_t *mb_req = (esp_zb_zdo_mgmt_bind_param_t *)malloc(sizeof(esp_zb_zdo_mgmt_bind_param_t)); + mb_req->dst_addr = esp_zb_get_short_address(); + mb_req->start_index = 0; + log_d("Requesting binding table for address 0x%04x", mb_req->dst_addr); + esp_zb_zdo_binding_table_req(mb_req, bindingTableCb, (void *)mb_req); +} + // Function to convert enum value to string const char *ZigbeeCore::getDeviceTypeString(esp_zb_ha_standard_devices_t deviceId) { switch (deviceId) { diff --git a/libraries/Zigbee/src/ZigbeeCore.h b/libraries/Zigbee/src/ZigbeeCore.h index 75fecd59198..6729b7cc9f4 100644 --- a/libraries/Zigbee/src/ZigbeeCore.h +++ b/libraries/Zigbee/src/ZigbeeCore.h @@ -80,6 +80,8 @@ class ZigbeeCore { bool zigbeeInit(esp_zb_cfg_t *zb_cfg, bool erase_nvs); static void scanCompleteCallback(esp_zb_zdp_status_t zdo_status, uint8_t count, esp_zb_network_descriptor_t *nwk_descriptor); const char *getDeviceTypeString(esp_zb_ha_standard_devices_t deviceId); + void searchBindings(); + static void bindingTableCb(const esp_zb_zdo_binding_table_info_t *table_info, void *user_ctx); public: ZigbeeCore(); diff --git a/libraries/Zigbee/src/ZigbeeEP.cpp b/libraries/Zigbee/src/ZigbeeEP.cpp index 0a947d3ab9e..dbfe8596268 100644 --- a/libraries/Zigbee/src/ZigbeeEP.cpp +++ b/libraries/Zigbee/src/ZigbeeEP.cpp @@ -7,6 +7,8 @@ #include "esp_zigbee_cluster.h" #include "zcl/esp_zigbee_zcl_power_config.h" +#define ZB_CMD_TIMEOUT 10000 // 10 seconds + bool ZigbeeEP::_is_bound = false; bool ZigbeeEP::_allow_multiple_binding = false; @@ -112,13 +114,20 @@ void ZigbeeEP::reportBatteryPercentage() { log_v("Battery percentage reported"); } -char *ZigbeeEP::readManufacturer(uint8_t endpoint, uint16_t short_addr) { +char *ZigbeeEP::readManufacturer(uint8_t endpoint, uint16_t short_addr, esp_zb_ieee_addr_t ieee_addr) { /* Read peer Manufacture Name & Model Identifier */ esp_zb_zcl_read_attr_cmd_t read_req; - read_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_ENDP_PRESENT; + + if (short_addr != 0) { + 
read_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_ENDP_PRESENT; + read_req.zcl_basic_cmd.dst_addr_u.addr_short = short_addr; + } else { + read_req.address_mode = ESP_ZB_APS_ADDR_MODE_64_ENDP_PRESENT; + memcpy(read_req.zcl_basic_cmd.dst_addr_u.addr_long, ieee_addr, sizeof(esp_zb_ieee_addr_t)); + } + read_req.zcl_basic_cmd.src_endpoint = _endpoint; read_req.zcl_basic_cmd.dst_endpoint = endpoint; - read_req.zcl_basic_cmd.dst_addr_u.addr_short = short_addr; read_req.clusterID = ESP_ZB_ZCL_CLUSTER_ID_BASIC; uint16_t attributes[] = { @@ -130,22 +139,31 @@ char *ZigbeeEP::readManufacturer(uint8_t endpoint, uint16_t short_addr) { // clear read manufacturer _read_manufacturer = nullptr; + esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_read_attr_cmd_req(&read_req); + esp_zb_lock_release(); //Wait for response or timeout - if (xSemaphoreTake(lock, portMAX_DELAY) != pdTRUE) { + if (xSemaphoreTake(lock, ZB_CMD_TIMEOUT) != pdTRUE) { log_e("Error while reading manufacturer"); } return _read_manufacturer; } -char *ZigbeeEP::readModel(uint8_t endpoint, uint16_t short_addr) { +char *ZigbeeEP::readModel(uint8_t endpoint, uint16_t short_addr, esp_zb_ieee_addr_t ieee_addr) { /* Read peer Manufacture Name & Model Identifier */ esp_zb_zcl_read_attr_cmd_t read_req; - read_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_ENDP_PRESENT; + + if (short_addr != 0) { + read_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_ENDP_PRESENT; + read_req.zcl_basic_cmd.dst_addr_u.addr_short = short_addr; + } else { + read_req.address_mode = ESP_ZB_APS_ADDR_MODE_64_ENDP_PRESENT; + memcpy(read_req.zcl_basic_cmd.dst_addr_u.addr_long, ieee_addr, sizeof(esp_zb_ieee_addr_t)); + } + read_req.zcl_basic_cmd.src_endpoint = _endpoint; read_req.zcl_basic_cmd.dst_endpoint = endpoint; - read_req.zcl_basic_cmd.dst_addr_u.addr_short = short_addr; read_req.clusterID = ESP_ZB_ZCL_CLUSTER_ID_BASIC; uint16_t attributes[] = { @@ -157,11 +175,12 @@ char *ZigbeeEP::readModel(uint8_t endpoint, uint16_t short_addr) { // clear read model _read_model = nullptr; + esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_read_attr_cmd_req(&read_req); + esp_zb_lock_release(); //Wait for response or timeout - //Semaphore take - if (xSemaphoreTake(lock, portMAX_DELAY) != pdTRUE) { + if (xSemaphoreTake(lock, ZB_CMD_TIMEOUT) != pdTRUE) { log_e("Error while reading model"); } return _read_model; @@ -171,8 +190,23 @@ void ZigbeeEP::printBoundDevices() { log_i("Bound devices:"); for ([[maybe_unused]] const auto &device : _bound_devices) { - log_i("Device on endpoint %d, short address: 0x%x", device->endpoint, device->short_addr); - print_ieee_addr(device->ieee_addr); + log_i( + "Device on endpoint %d, short address: 0x%x, ieee address: %02x:%02x:%02x:%02x:%02x:%02x:%02x:%02x", device->endpoint, device->short_addr, + device->ieee_addr[7], device->ieee_addr[6], device->ieee_addr[5], device->ieee_addr[4], device->ieee_addr[3], device->ieee_addr[2], device->ieee_addr[1], + device->ieee_addr[0] + ); + } +} + +void ZigbeeEP::printBoundDevices(Print &print) { + print.println("Bound devices:"); + for ([[maybe_unused]] + const auto &device : _bound_devices) { + print.printf( + "Device on endpoint %d, short address: 0x%x, ieee address: %02x:%02x:%02x:%02x:%02x:%02x:%02x:%02x\r\n", device->endpoint, device->short_addr, + device->ieee_addr[7], device->ieee_addr[6], device->ieee_addr[5], device->ieee_addr[4], device->ieee_addr[3], device->ieee_addr[2], device->ieee_addr[1], + device->ieee_addr[0] + ); } } diff --git a/libraries/Zigbee/src/ZigbeeEP.h b/libraries/Zigbee/src/ZigbeeEP.h index 
21ed7706d31..522c84620ff 100644 --- a/libraries/Zigbee/src/ZigbeeEP.h +++ b/libraries/Zigbee/src/ZigbeeEP.h @@ -9,8 +9,6 @@ /* Useful defines */ #define ZB_ARRAY_LENTH(arr) (sizeof(arr) / sizeof(arr[0])) -#define print_ieee_addr(addr) \ - log_i("IEEE Address: %02X:%02X:%02X:%02X:%02X:%02X:%02X:%02X", addr[0], addr[1], addr[2], addr[3], addr[4], addr[5], addr[6], addr[7]) #define XYZ_TO_RGB(X, Y, Z, r, g, b) \ { \ r = (float)(3.240479 * (X) - 1.537150 * (Y) - 0.498535 * (Z)); \ @@ -69,6 +67,8 @@ class ZigbeeEP { } void printBoundDevices(); + void printBoundDevices(Print &print); + std::list<zb_device_params_t *> getBoundDevices() const { return _bound_devices; } @@ -87,8 +87,8 @@ class ZigbeeEP { void reportBatteryPercentage(); // Methods to read manufacturer and model name from selected endpoint and short address - char *readManufacturer(uint8_t endpoint, uint16_t short_addr); - char *readModel(uint8_t endpoint, uint16_t short_addr); + char *readManufacturer(uint8_t endpoint, uint16_t short_addr, esp_zb_ieee_addr_t ieee_addr); + char *readModel(uint8_t endpoint, uint16_t short_addr, esp_zb_ieee_addr_t ieee_addr); bool epAllowMultipleBinding() { return _allow_multiple_binding; @@ -108,7 +108,6 @@ class ZigbeeEP { } private: - static bool _allow_multiple_binding; char *_read_manufacturer; char *_read_model; void (*_on_identify)(uint16_t time); @@ -119,10 +118,15 @@ class ZigbeeEP { esp_zb_endpoint_config_t _ep_config; esp_zb_cluster_list_t *_cluster_list; static bool _is_bound; + static bool _allow_multiple_binding; std::list<zb_device_params_t *> _bound_devices; SemaphoreHandle_t lock; zb_power_source_t _power_source; + void addBoundDevice(zb_device_params_t *device) { + _bound_devices.push_back(device); + _is_bound = true; + } friend class ZigbeeCore; }; diff --git a/libraries/Zigbee/src/ep/ZigbeeColorDimmerSwitch.cpp b/libraries/Zigbee/src/ep/ZigbeeColorDimmerSwitch.cpp index 8e72728f6a2..4fd492a5477 100644 --- a/libraries/Zigbee/src/ep/ZigbeeColorDimmerSwitch.cpp +++ b/libraries/Zigbee/src/ep/ZigbeeColorDimmerSwitch.cpp @@ -98,10 +98,10 @@ void ZigbeeColorDimmerSwitch::lightToggle() { cmd_req.zcl_basic_cmd.src_endpoint = _endpoint; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_DST_ADDR_ENDP_NOT_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_TOGGLE_ID; - log_i("Sending 'light toggle' command"); - //esp_zb_lock_acquire(portMAX_DELAY); + log_v("Sending 'light toggle' command"); + esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); - //esp_zb_lock_release(); + esp_zb_lock_release(); } else { log_e("Light not bound"); } @@ -114,7 +114,7 @@ void ZigbeeColorDimmerSwitch::lightToggle(uint16_t group_addr) { cmd_req.zcl_basic_cmd.src_endpoint = _endpoint; cmd_req.zcl_basic_cmd.dst_addr_u.addr_short = group_addr; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_GROUP_ENDP_NOT_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_TOGGLE_ID; - log_i("Sending 'light toggle' command to group address 0x%x", group_addr); + log_v("Sending 'light toggle' command to group address 0x%x", group_addr); esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); esp_zb_lock_release(); @@ -131,7 +131,27 @@ void ZigbeeColorDimmerSwitch::lightToggle(uint8_t endpoint, uint16_t short_addr) cmd_req.zcl_basic_cmd.dst_addr_u.addr_short = short_addr; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_ENDP_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_TOGGLE_ID; - log_i("Sending 'light toggle' command to endpoint %d, address 0x%x", endpoint, short_addr); + log_v("Sending 'light toggle' command to endpoint %d, address 0x%x", endpoint, short_addr); + 
esp_zb_lock_acquire(portMAX_DELAY); + esp_zb_zcl_on_off_cmd_req(&cmd_req); + esp_zb_lock_release(); + } else { + log_e("Light not bound"); + } +} + +void ZigbeeColorDimmerSwitch::lightToggle(uint8_t endpoint, esp_zb_ieee_addr_t ieee_addr) { + if (_is_bound) { + esp_zb_zcl_on_off_cmd_t cmd_req; + cmd_req.zcl_basic_cmd.src_endpoint = _endpoint; + cmd_req.zcl_basic_cmd.dst_endpoint = endpoint; + cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_64_ENDP_PRESENT; + cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_TOGGLE_ID; + memcpy(cmd_req.zcl_basic_cmd.dst_addr_u.addr_long, ieee_addr, sizeof(esp_zb_ieee_addr_t)); + log_v( + "Sending 'light toggle' command to endpoint %d, ieee address %02x:%02x:%02x:%02x:%02x:%02x:%02x:%02x", endpoint, ieee_addr[7], ieee_addr[6], ieee_addr[5], + ieee_addr[4], ieee_addr[3], ieee_addr[2], ieee_addr[1], ieee_addr[0] + ); esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); esp_zb_lock_release(); @@ -146,7 +166,7 @@ void ZigbeeColorDimmerSwitch::lightOn() { cmd_req.zcl_basic_cmd.src_endpoint = _endpoint; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_DST_ADDR_ENDP_NOT_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_ON_ID; - log_i("Sending 'light on' command"); + log_v("Sending 'light on' command"); esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); esp_zb_lock_release(); @@ -162,7 +182,7 @@ void ZigbeeColorDimmerSwitch::lightOn(uint16_t group_addr) { cmd_req.zcl_basic_cmd.dst_addr_u.addr_short = group_addr; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_GROUP_ENDP_NOT_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_ON_ID; - log_i("Sending 'light on' command to group address 0x%x", group_addr); + log_v("Sending 'light on' command to group address 0x%x", group_addr); esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); esp_zb_lock_release(); @@ -179,7 +199,27 @@ void ZigbeeColorDimmerSwitch::lightOn(uint8_t endpoint, uint16_t short_addr) { cmd_req.zcl_basic_cmd.dst_addr_u.addr_short = short_addr; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_ENDP_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_ON_ID; - log_i("Sending 'light on' command to endpoint %d, address 0x%x", endpoint, short_addr); + log_v("Sending 'light on' command to endpoint %d, address 0x%x", endpoint, short_addr); + esp_zb_lock_acquire(portMAX_DELAY); + esp_zb_zcl_on_off_cmd_req(&cmd_req); + esp_zb_lock_release(); + } else { + log_e("Light not bound"); + } +} + +void ZigbeeColorDimmerSwitch::lightOn(uint8_t endpoint, esp_zb_ieee_addr_t ieee_addr) { + if (_is_bound) { + esp_zb_zcl_on_off_cmd_t cmd_req; + cmd_req.zcl_basic_cmd.src_endpoint = _endpoint; + cmd_req.zcl_basic_cmd.dst_endpoint = endpoint; + cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_64_ENDP_PRESENT; + cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_ON_ID; + memcpy(cmd_req.zcl_basic_cmd.dst_addr_u.addr_long, ieee_addr, sizeof(esp_zb_ieee_addr_t)); + log_v( + "Sending 'light on' command to endpoint %d, ieee address %02x:%02x:%02x:%02x:%02x:%02x:%02x:%02x", endpoint, ieee_addr[7], ieee_addr[6], ieee_addr[5], + ieee_addr[4], ieee_addr[3], ieee_addr[2], ieee_addr[1], ieee_addr[0] + ); esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); esp_zb_lock_release(); @@ -194,7 +234,7 @@ void ZigbeeColorDimmerSwitch::lightOff() { cmd_req.zcl_basic_cmd.src_endpoint = _endpoint; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_DST_ADDR_ENDP_NOT_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_OFF_ID; - log_i("Sending 'light off' command"); + log_v("Sending 'light off' 
command"); esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); esp_zb_lock_release(); @@ -210,7 +250,7 @@ void ZigbeeColorDimmerSwitch::lightOff(uint16_t group_addr) { cmd_req.zcl_basic_cmd.dst_addr_u.addr_short = group_addr; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_GROUP_ENDP_NOT_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_OFF_ID; - log_i("Sending 'light off' command to group address 0x%x", group_addr); + log_v("Sending 'light off' command to group address 0x%x", group_addr); esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); esp_zb_lock_release(); @@ -227,7 +267,27 @@ void ZigbeeColorDimmerSwitch::lightOff(uint8_t endpoint, uint16_t short_addr) { cmd_req.zcl_basic_cmd.dst_addr_u.addr_short = short_addr; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_ENDP_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_OFF_ID; - log_i("Sending 'light off' command to endpoint %d, address 0x%x", endpoint, short_addr); + log_v("Sending 'light off' command to endpoint %d, address 0x%x", endpoint, short_addr); + esp_zb_lock_acquire(portMAX_DELAY); + esp_zb_zcl_on_off_cmd_req(&cmd_req); + esp_zb_lock_release(); + } else { + log_e("Light not bound"); + } +} + +void ZigbeeColorDimmerSwitch::lightOff(uint8_t endpoint, esp_zb_ieee_addr_t ieee_addr) { + if (_is_bound) { + esp_zb_zcl_on_off_cmd_t cmd_req; + cmd_req.zcl_basic_cmd.src_endpoint = _endpoint; + cmd_req.zcl_basic_cmd.dst_endpoint = endpoint; + cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_64_ENDP_PRESENT; + cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_OFF_ID; + memcpy(cmd_req.zcl_basic_cmd.dst_addr_u.addr_long, ieee_addr, sizeof(esp_zb_ieee_addr_t)); + log_v( + "Sending 'light off' command to endpoint %d, ieee address %02x:%02x:%02x:%02x:%02x:%02x:%02x:%02x", endpoint, ieee_addr[7], ieee_addr[6], ieee_addr[5], + ieee_addr[4], ieee_addr[3], ieee_addr[2], ieee_addr[1], ieee_addr[0] + ); esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); esp_zb_lock_release(); @@ -243,7 +303,7 @@ void ZigbeeColorDimmerSwitch::lightOffWithEffect(uint8_t effect_id, uint8_t effe cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_DST_ADDR_ENDP_NOT_PRESENT; cmd_req.effect_id = effect_id; cmd_req.effect_variant = effect_variant; - log_i("Sending 'light off with effect' command"); + log_v("Sending 'light off with effect' command"); esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_off_with_effect_cmd_req(&cmd_req); esp_zb_lock_release(); @@ -257,7 +317,7 @@ void ZigbeeColorDimmerSwitch::lightOnWithSceneRecall() { esp_zb_zcl_on_off_on_with_recall_global_scene_cmd_t cmd_req; cmd_req.zcl_basic_cmd.src_endpoint = _endpoint; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_DST_ADDR_ENDP_NOT_PRESENT; - log_i("Sending 'light on with scene recall' command"); + log_v("Sending 'light on with scene recall' command"); esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_on_with_recall_global_scene_cmd_req(&cmd_req); esp_zb_lock_release(); @@ -274,7 +334,7 @@ void ZigbeeColorDimmerSwitch::lightOnWithTimedOff(uint8_t on_off_control, uint16 cmd_req.on_off_control = on_off_control; //TODO: Test how it works, then maybe change API cmd_req.on_time = time_on; cmd_req.off_wait_time = time_off; - log_i("Sending 'light on with time off' command"); + log_v("Sending 'light on with time off' command"); esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_on_with_timed_off_cmd_req(&cmd_req); esp_zb_lock_release(); @@ -290,7 +350,7 @@ void ZigbeeColorDimmerSwitch::setLightLevel(uint8_t level) { cmd_req.address_mode = 
ESP_ZB_APS_ADDR_MODE_DST_ADDR_ENDP_NOT_PRESENT; cmd_req.level = level; cmd_req.transition_time = 0xffff; - log_i("Sending 'set light level' command"); + log_v("Sending 'set light level' command"); esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_level_move_to_level_cmd_req(&cmd_req); esp_zb_lock_release(); @@ -307,7 +367,7 @@ void ZigbeeColorDimmerSwitch::setLightLevel(uint8_t level, uint16_t group_addr) cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_GROUP_ENDP_NOT_PRESENT; cmd_req.level = level; cmd_req.transition_time = 0xffff; - log_i("Sending 'set light level' command to group address 0x%x", group_addr); + log_v("Sending 'set light level' command to group address 0x%x", group_addr); esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_level_move_to_level_cmd_req(&cmd_req); esp_zb_lock_release(); @@ -325,7 +385,28 @@ void ZigbeeColorDimmerSwitch::setLightLevel(uint8_t level, uint8_t endpoint, uin cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_ENDP_PRESENT; cmd_req.level = level; cmd_req.transition_time = 0xffff; - log_i("Sending 'set light level' command to endpoint %d, address 0x%x", endpoint, short_addr); + log_v("Sending 'set light level' command to endpoint %d, address 0x%x", endpoint, short_addr); + esp_zb_lock_acquire(portMAX_DELAY); + esp_zb_zcl_level_move_to_level_cmd_req(&cmd_req); + esp_zb_lock_release(); + } else { + log_e("Light not bound"); + } +} + +void ZigbeeColorDimmerSwitch::setLightLevel(uint8_t level, uint8_t endpoint, esp_zb_ieee_addr_t ieee_addr) { + if (_is_bound) { + esp_zb_zcl_move_to_level_cmd_t cmd_req; + cmd_req.zcl_basic_cmd.src_endpoint = _endpoint; + cmd_req.zcl_basic_cmd.dst_endpoint = endpoint; + cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_64_ENDP_PRESENT; + memcpy(cmd_req.zcl_basic_cmd.dst_addr_u.addr_long, ieee_addr, sizeof(esp_zb_ieee_addr_t)); + cmd_req.level = level; + cmd_req.transition_time = 0xffff; + log_v( + "Sending 'set light level' command to endpoint %d, ieee address %02x:%02x:%02x:%02x:%02x:%02x:%02x:%02x", endpoint, ieee_addr[7], ieee_addr[6], + ieee_addr[5], ieee_addr[4], ieee_addr[3], ieee_addr[2], ieee_addr[1], ieee_addr[0] + ); esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_level_move_to_level_cmd_req(&cmd_req); esp_zb_lock_release(); @@ -346,7 +427,7 @@ void ZigbeeColorDimmerSwitch::setLightColor(uint8_t red, uint8_t green, uint8_t cmd_req.color_x = color_x; cmd_req.color_y = color_y; cmd_req.transition_time = 0; - log_i("Sending 'set light color' command"); + log_v("Sending 'set light color' command"); esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_color_move_to_color_cmd_req(&cmd_req); esp_zb_lock_release(); @@ -368,7 +449,7 @@ void ZigbeeColorDimmerSwitch::setLightColor(uint8_t red, uint8_t green, uint8_t cmd_req.color_x = color_x; cmd_req.color_y = color_y; cmd_req.transition_time = 0; - log_i("Sending 'set light color' command to group address 0x%x", group_addr); + log_v("Sending 'set light color' command to group address 0x%x", group_addr); esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_color_move_to_color_cmd_req(&cmd_req); esp_zb_lock_release(); @@ -391,7 +472,33 @@ void ZigbeeColorDimmerSwitch::setLightColor(uint8_t red, uint8_t green, uint8_t cmd_req.color_x = color_x; cmd_req.color_y = color_y; cmd_req.transition_time = 0; - log_i("Sending 'set light color' command to endpoint %d, address 0x%x", endpoint, short_addr); + log_v("Sending 'set light color' command to endpoint %d, address 0x%x", endpoint, short_addr); + esp_zb_lock_acquire(portMAX_DELAY); + esp_zb_zcl_color_move_to_color_cmd_req(&cmd_req); + esp_zb_lock_release(); + } 
else { + log_e("Light not bound"); + } +} + +void ZigbeeColorDimmerSwitch::setLightColor(uint8_t red, uint8_t green, uint8_t blue, uint8_t endpoint, esp_zb_ieee_addr_t ieee_addr) { + if (_is_bound) { + //Convert RGB to XY + uint16_t color_x, color_y; + calculateXY(red, green, blue, color_x, color_y); + + esp_zb_zcl_color_move_to_color_cmd_t cmd_req; + cmd_req.zcl_basic_cmd.src_endpoint = _endpoint; + cmd_req.zcl_basic_cmd.dst_endpoint = endpoint; + cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_64_ENDP_PRESENT; + memcpy(cmd_req.zcl_basic_cmd.dst_addr_u.addr_long, ieee_addr, sizeof(esp_zb_ieee_addr_t)); + cmd_req.color_x = color_x; + cmd_req.color_y = color_y; + cmd_req.transition_time = 0; + log_v( + "Sending 'set light color' command to endpoint %d, ieee address %02x:%02x:%02x:%02x:%02x:%02x:%02x:%02x", endpoint, ieee_addr[7], ieee_addr[6], + ieee_addr[5], ieee_addr[4], ieee_addr[3], ieee_addr[2], ieee_addr[1], ieee_addr[0] + ); esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_color_move_to_color_cmd_req(&cmd_req); esp_zb_lock_release(); diff --git a/libraries/Zigbee/src/ep/ZigbeeColorDimmerSwitch.h b/libraries/Zigbee/src/ep/ZigbeeColorDimmerSwitch.h index 9b623d2f8ef..8e2a4d9e1a3 100644 --- a/libraries/Zigbee/src/ep/ZigbeeColorDimmerSwitch.h +++ b/libraries/Zigbee/src/ep/ZigbeeColorDimmerSwitch.h @@ -18,14 +18,17 @@ class ZigbeeColorDimmerSwitch : public ZigbeeEP { void lightToggle(); void lightToggle(uint16_t group_addr); void lightToggle(uint8_t endpoint, uint16_t short_addr); + void lightToggle(uint8_t endpoint, esp_zb_ieee_addr_t ieee_addr); void lightOn(); void lightOn(uint16_t group_addr); void lightOn(uint8_t endpoint, uint16_t short_addr); + void lightOn(uint8_t endpoint, esp_zb_ieee_addr_t ieee_addr); void lightOff(); void lightOff(uint16_t group_addr); void lightOff(uint8_t endpoint, uint16_t short_addr); + void lightOff(uint8_t endpoint, esp_zb_ieee_addr_t ieee_addr); void lightOffWithEffect(uint8_t effect_id, uint8_t effect_variant); void lightOnWithTimedOff(uint8_t on_off_control, uint16_t time_on, uint16_t time_off); @@ -34,10 +37,12 @@ class ZigbeeColorDimmerSwitch : public ZigbeeEP { void setLightLevel(uint8_t level); void setLightLevel(uint8_t level, uint16_t group_addr); void setLightLevel(uint8_t level, uint8_t endpoint, uint16_t short_addr); + void setLightLevel(uint8_t level, uint8_t endpoint, esp_zb_ieee_addr_t ieee_addr); void setLightColor(uint8_t red, uint8_t green, uint8_t blue); void setLightColor(uint8_t red, uint8_t green, uint8_t blue, uint16_t group_addr); void setLightColor(uint8_t red, uint8_t green, uint8_t blue, uint8_t endpoint, uint16_t short_addr); + void setLightColor(uint8_t red, uint8_t green, uint8_t blue, uint8_t endpoint, esp_zb_ieee_addr_t ieee_addr); private: // save instance of the class in order to use it in static functions diff --git a/libraries/Zigbee/src/ep/ZigbeeSwitch.cpp b/libraries/Zigbee/src/ep/ZigbeeSwitch.cpp index 16af8008a8a..f6b36d7f0d4 100644 --- a/libraries/Zigbee/src/ep/ZigbeeSwitch.cpp +++ b/libraries/Zigbee/src/ep/ZigbeeSwitch.cpp @@ -59,7 +59,6 @@ void ZigbeeSwitch::findEndpoint(esp_zb_zdo_match_desc_req_param_t *cmd_req) { .num_out_clusters = 1, .cluster_list = cluster_list, }; - esp_zb_zdo_match_cluster(&on_off_req, findCb, &_endpoint); } @@ -70,8 +69,10 @@ void ZigbeeSwitch::lightToggle() { cmd_req.zcl_basic_cmd.src_endpoint = _endpoint; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_DST_ADDR_ENDP_NOT_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_TOGGLE_ID; - log_i("Sending 'light toggle' command"); + log_v("Sending 
'light toggle' command"); + esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); + esp_zb_lock_release(); } else { log_e("Light not bound"); } @@ -84,8 +85,10 @@ void ZigbeeSwitch::lightToggle(uint16_t group_addr) { cmd_req.zcl_basic_cmd.dst_addr_u.addr_short = group_addr; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_GROUP_ENDP_NOT_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_TOGGLE_ID; - log_i("Sending 'light toggle' command to group address 0x%x", group_addr); + log_v("Sending 'light toggle' command to group address 0x%x", group_addr); + esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); + esp_zb_lock_release(); } else { log_e("Light not bound"); } @@ -99,8 +102,30 @@ void ZigbeeSwitch::lightToggle(uint8_t endpoint, uint16_t short_addr) { cmd_req.zcl_basic_cmd.dst_addr_u.addr_short = short_addr; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_ENDP_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_TOGGLE_ID; - log_i("Sending 'light toggle' command to endpoint %d, address 0x%x", endpoint, short_addr); + log_v("Sending 'light toggle' command to endpoint %d, address 0x%x", endpoint, short_addr); + esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); + esp_zb_lock_release(); + } else { + log_e("Light not bound"); + } +} + +void ZigbeeSwitch::lightToggle(uint8_t endpoint, esp_zb_ieee_addr_t ieee_addr) { + if (_is_bound) { + esp_zb_zcl_on_off_cmd_t cmd_req; + cmd_req.zcl_basic_cmd.src_endpoint = _endpoint; + cmd_req.zcl_basic_cmd.dst_endpoint = endpoint; + cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_64_ENDP_PRESENT; + cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_TOGGLE_ID; + memcpy(cmd_req.zcl_basic_cmd.dst_addr_u.addr_long, ieee_addr, sizeof(esp_zb_ieee_addr_t)); + log_v( + "Sending 'light toggle' command to endpoint %d, ieee address %02x:%02x:%02x:%02x:%02x:%02x:%02x:%02x", endpoint, ieee_addr[7], ieee_addr[6], ieee_addr[5], + ieee_addr[4], ieee_addr[3], ieee_addr[2], ieee_addr[1], ieee_addr[0] + ); + esp_zb_lock_acquire(portMAX_DELAY); + esp_zb_zcl_on_off_cmd_req(&cmd_req); + esp_zb_lock_release(); } else { log_e("Light not bound"); } @@ -112,8 +137,10 @@ void ZigbeeSwitch::lightOn() { cmd_req.zcl_basic_cmd.src_endpoint = _endpoint; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_DST_ADDR_ENDP_NOT_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_ON_ID; - log_i("Sending 'light on' command"); + log_v("Sending 'light on' command"); + esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); + esp_zb_lock_release(); } else { log_e("Light not bound"); } @@ -126,8 +153,10 @@ void ZigbeeSwitch::lightOn(uint16_t group_addr) { cmd_req.zcl_basic_cmd.dst_addr_u.addr_short = group_addr; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_GROUP_ENDP_NOT_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_ON_ID; - log_i("Sending 'light on' command to group address 0x%x", group_addr); + log_v("Sending 'light on' command to group address 0x%x", group_addr); + esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); + esp_zb_lock_release(); } else { log_e("Light not bound"); } @@ -141,8 +170,30 @@ void ZigbeeSwitch::lightOn(uint8_t endpoint, uint16_t short_addr) { cmd_req.zcl_basic_cmd.dst_addr_u.addr_short = short_addr; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_ENDP_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_ON_ID; - log_i("Sending 'light on' command to endpoint %d, address 0x%x", endpoint, short_addr); + log_v("Sending 'light on' command to endpoint %d, address 0x%x", endpoint, 
short_addr); + esp_zb_lock_acquire(portMAX_DELAY); + esp_zb_zcl_on_off_cmd_req(&cmd_req); + esp_zb_lock_release(); + } else { + log_e("Light not bound"); + } +} + +void ZigbeeSwitch::lightOn(uint8_t endpoint, esp_zb_ieee_addr_t ieee_addr) { + if (_is_bound) { + esp_zb_zcl_on_off_cmd_t cmd_req; + cmd_req.zcl_basic_cmd.src_endpoint = _endpoint; + cmd_req.zcl_basic_cmd.dst_endpoint = endpoint; + cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_64_ENDP_PRESENT; + cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_ON_ID; + memcpy(cmd_req.zcl_basic_cmd.dst_addr_u.addr_long, ieee_addr, sizeof(esp_zb_ieee_addr_t)); + log_v( + "Sending 'light on' command to endpoint %d, ieee address %02x:%02x:%02x:%02x:%02x:%02x:%02x:%02x", endpoint, ieee_addr[7], ieee_addr[6], ieee_addr[5], + ieee_addr[4], ieee_addr[3], ieee_addr[2], ieee_addr[1], ieee_addr[0] + ); + esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); + esp_zb_lock_release(); } else { log_e("Light not bound"); } @@ -154,8 +205,10 @@ void ZigbeeSwitch::lightOff() { cmd_req.zcl_basic_cmd.src_endpoint = _endpoint; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_DST_ADDR_ENDP_NOT_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_OFF_ID; - log_i("Sending 'light off' command"); + log_v("Sending 'light off' command"); + esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); + esp_zb_lock_release(); } else { log_e("Light not bound"); } @@ -168,8 +221,10 @@ void ZigbeeSwitch::lightOff(uint16_t group_addr) { cmd_req.zcl_basic_cmd.dst_addr_u.addr_short = group_addr; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_GROUP_ENDP_NOT_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_OFF_ID; - log_i("Sending 'light off' command to group address 0x%x", group_addr); + log_v("Sending 'light off' command to group address 0x%x", group_addr); + esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); + esp_zb_lock_release(); } else { log_e("Light not bound"); } @@ -183,8 +238,30 @@ void ZigbeeSwitch::lightOff(uint8_t endpoint, uint16_t short_addr) { cmd_req.zcl_basic_cmd.dst_addr_u.addr_short = short_addr; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_16_ENDP_PRESENT; cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_OFF_ID; - log_i("Sending 'light off' command to endpoint %d, address 0x%x", endpoint, short_addr); + log_v("Sending 'light off' command to endpoint %d, address 0x%x", endpoint, short_addr); + esp_zb_lock_acquire(portMAX_DELAY); + esp_zb_zcl_on_off_cmd_req(&cmd_req); + esp_zb_lock_release(); + } else { + log_e("Light not bound"); + } +} + +void ZigbeeSwitch::lightOff(uint8_t endpoint, esp_zb_ieee_addr_t ieee_addr) { + if (_is_bound) { + esp_zb_zcl_on_off_cmd_t cmd_req; + cmd_req.zcl_basic_cmd.src_endpoint = _endpoint; + cmd_req.zcl_basic_cmd.dst_endpoint = endpoint; + cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_64_ENDP_PRESENT; + cmd_req.on_off_cmd_id = ESP_ZB_ZCL_CMD_ON_OFF_OFF_ID; + memcpy(cmd_req.zcl_basic_cmd.dst_addr_u.addr_long, ieee_addr, sizeof(esp_zb_ieee_addr_t)); + log_v( + "Sending 'light off' command to endpoint %d, ieee address %02x:%02x:%02x:%02x:%02x:%02x:%02x:%02x", endpoint, ieee_addr[7], ieee_addr[6], ieee_addr[5], + ieee_addr[4], ieee_addr[3], ieee_addr[2], ieee_addr[1], ieee_addr[0] + ); + esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_cmd_req(&cmd_req); + esp_zb_lock_release(); } else { log_e("Light not bound"); } @@ -197,8 +274,10 @@ void ZigbeeSwitch::lightOffWithEffect(uint8_t effect_id, uint8_t effect_variant) cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_DST_ADDR_ENDP_NOT_PRESENT; 
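Note: the IEEE-address overloads added in these hunks let a sketch address a bound light when only its 64-bit address is known, for example after bindings are restored from the binding table and no short address was recorded. The helper below is a hypothetical fragment, not part of this patch: `zbSwitch` is assumed to be a bound `ZigbeeSwitch` endpoint as in the Zigbee_On_Off_Switch example, and the short-address check mirrors the one used in `readManufacturer()`.

```cpp
#include "Zigbee.h"

// Hypothetical helper: toggle every light bound to this switch endpoint,
// preferring the 16-bit short address and falling back to the IEEE address.
void toggleAllBoundLights(ZigbeeSwitch &zbSwitch) {
  std::list<zb_device_params_t *> boundLights = zbSwitch.getBoundDevices();
  for (const auto &device : boundLights) {
    if (device->short_addr != 0) {
      zbSwitch.lightToggle(device->endpoint, device->short_addr);  // existing short-address overload
    } else {
      zbSwitch.lightToggle(device->endpoint, device->ieee_addr);   // new 64-bit IEEE-address overload
    }
  }
}
```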
cmd_req.effect_id = effect_id; cmd_req.effect_variant = effect_variant; - log_i("Sending 'light off with effect' command"); + log_v("Sending 'light off with effect' command"); + esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_off_with_effect_cmd_req(&cmd_req); + esp_zb_lock_release(); } else { log_e("Light not bound"); } @@ -209,8 +288,10 @@ void ZigbeeSwitch::lightOnWithSceneRecall() { esp_zb_zcl_on_off_on_with_recall_global_scene_cmd_t cmd_req; cmd_req.zcl_basic_cmd.src_endpoint = _endpoint; cmd_req.address_mode = ESP_ZB_APS_ADDR_MODE_DST_ADDR_ENDP_NOT_PRESENT; - log_i("Sending 'light on with scene recall' command"); + log_v("Sending 'light on with scene recall' command"); + esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_on_with_recall_global_scene_cmd_req(&cmd_req); + esp_zb_lock_release(); } else { log_e("Light not bound"); } @@ -223,8 +304,10 @@ void ZigbeeSwitch::lightOnWithTimedOff(uint8_t on_off_control, uint16_t time_on, cmd_req.on_off_control = on_off_control; //TODO: Test how it works, then maybe change API cmd_req.on_time = time_on; cmd_req.off_wait_time = time_off; - log_i("Sending 'light on with time off' command"); + log_v("Sending 'light on with time off' command"); + esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_on_off_on_with_timed_off_cmd_req(&cmd_req); + esp_zb_lock_release(); } else { log_e("Light not bound"); } diff --git a/libraries/Zigbee/src/ep/ZigbeeSwitch.h b/libraries/Zigbee/src/ep/ZigbeeSwitch.h index a8d892f37e9..62264641378 100644 --- a/libraries/Zigbee/src/ep/ZigbeeSwitch.h +++ b/libraries/Zigbee/src/ep/ZigbeeSwitch.h @@ -18,14 +18,17 @@ class ZigbeeSwitch : public ZigbeeEP { void lightToggle(); void lightToggle(uint16_t group_addr); void lightToggle(uint8_t endpoint, uint16_t short_addr); + void lightToggle(uint8_t endpoint, esp_zb_ieee_addr_t ieee_addr); void lightOn(); void lightOn(uint16_t group_addr); void lightOn(uint8_t endpoint, uint16_t short_addr); + void lightOn(uint8_t endpoint, esp_zb_ieee_addr_t ieee_addr); void lightOff(); void lightOff(uint16_t group_addr); void lightOff(uint8_t endpoint, uint16_t short_addr); + void lightOff(uint8_t endpoint, esp_zb_ieee_addr_t ieee_addr); void lightOffWithEffect(uint8_t effect_id, uint8_t effect_variant); void lightOnWithTimedOff(uint8_t on_off_control, uint16_t time_on, uint16_t time_off); diff --git a/libraries/Zigbee/src/ep/ZigbeeTempSensor.cpp b/libraries/Zigbee/src/ep/ZigbeeTempSensor.cpp index 718c892f638..3dfac0489dd 100644 --- a/libraries/Zigbee/src/ep/ZigbeeTempSensor.cpp +++ b/libraries/Zigbee/src/ep/ZigbeeTempSensor.cpp @@ -60,7 +60,9 @@ void ZigbeeTempSensor::setReporting(uint16_t min_interval, uint16_t max_interval }, .manuf_code = ESP_ZB_ZCL_ATTR_NON_MANUFACTURER_SPECIFIC, }; + esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_update_reporting_info(&reporting_info); + esp_zb_lock_release(); } void ZigbeeTempSensor::setTemperature(float temperature) { @@ -158,7 +160,9 @@ void ZigbeeTempSensor::setHumidityReporting(uint16_t min_interval, uint16_t max_ }, .manuf_code = ESP_ZB_ZCL_ATTR_NON_MANUFACTURER_SPECIFIC, }; + esp_zb_lock_acquire(portMAX_DELAY); esp_zb_zcl_update_reporting_info(&reporting_info); + esp_zb_lock_release(); } #endif //SOC_IEEE802154_SUPPORTED && CONFIG_ZB_ENABLED diff --git a/tests/performance/psramspeed/test_psramspeed.py b/tests/performance/psramspeed/test_psramspeed.py index 8d051580799..9e96e158504 100644 --- a/tests/performance/psramspeed/test_psramspeed.py +++ b/tests/performance/psramspeed/test_psramspeed.py @@ -74,7 +74,7 @@ def test_psramspeed(dut, request): 
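Note: the pattern repeated throughout the Zigbee hunks above, bracketing every `esp_zb_*` request with `esp_zb_lock_acquire(portMAX_DELAY)` / `esp_zb_lock_release()`, guards calls that are issued from the Arduino task against the Zigbee stack task. A condensed sketch of the pattern, assuming an already populated reporting struct of the type used by `ZigbeeTempSensor::setReporting()`:

```cpp
// Minimal sketch of the locking pattern (reporting_info is assumed to be fully initialized).
static void updateReportingThreadSafe(esp_zb_zcl_reporting_info_t *reporting_info) {
  esp_zb_lock_acquire(portMAX_DELAY);              // block until the Zigbee stack lock is free
  esp_zb_zcl_update_reporting_info(reporting_info);
  esp_zb_lock_release();                           // hand control back to the stack task
}
```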
sums[(test, size, impl)]["time_sum"] += time avg_results = {} - for (test, size, impl) in sums: + for test, size, impl in sums: rate_avg = round(sums[(test, size, impl)]["rate_sum"] / runs, 2) time_avg = round(sums[(test, size, impl)]["time_sum"] / runs, 2) LOGGER.info( diff --git a/tests/performance/ramspeed/test_ramspeed.py b/tests/performance/ramspeed/test_ramspeed.py index b4c3cee7f9b..dbe1670d329 100644 --- a/tests/performance/ramspeed/test_ramspeed.py +++ b/tests/performance/ramspeed/test_ramspeed.py @@ -74,7 +74,7 @@ def test_ramspeed(dut, request): sums[(test, size, impl)]["time_sum"] += time avg_results = {} - for (test, size, impl) in sums: + for test, size, impl in sums: rate_avg = round(sums[(test, size, impl)]["rate_sum"] / runs, 2) time_avg = round(sums[(test, size, impl)]["time_sum"] / runs, 2) LOGGER.info( diff --git a/tests/validation/uart/ci.json b/tests/validation/uart/ci.json index dc24acbaee2..54da33b6176 100644 --- a/tests/validation/uart/ci.json +++ b/tests/validation/uart/ci.json @@ -1,8 +1,5 @@ { "platforms": { "qemu": false - }, - "targets": { - "esp32p4": false } } diff --git a/tests/validation/uart/diagram.esp32.json b/tests/validation/uart/diagram.esp32.json new file mode 100644 index 00000000000..a31c06d8313 --- /dev/null +++ b/tests/validation/uart/diagram.esp32.json @@ -0,0 +1,24 @@ +{ + "version": 1, + "author": "lucasssvaz", + "editor": "wokwi", + "parts": [ + { + "type": "board-esp32-devkit-c-v4", + "id": "esp", + "attrs": { "cpuFrequency": "40" } + } + ], + "connections": [ + [ + "esp:TX", + "$serialMonitor:RX", + "" + ], + [ + "esp:RX", + "$serialMonitor:TX", + "" + ] + ] +} diff --git a/tests/validation/uart/uart.ino b/tests/validation/uart/uart.ino index 01c449867db..27bd95da7f8 100644 --- a/tests/validation/uart/uart.ino +++ b/tests/validation/uart/uart.ino @@ -2,25 +2,20 @@ * * This test is using UART0 (Serial) only for reporting test status and helping with the auto * baudrate detection test. - * UART1 (Serial1) and UART2 (Serial2), where available, are used for testing. + * The other serials are used for testing. 
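Note: the reworked test below drives each spare HP UART in loopback and collects the echoed data through `onReceive()`. As a standalone illustration only, independent of the Unity harness and of the internal `uart_internal_loopback()` helper, a minimal sketch doing the same check on Serial1 might look like the following; the physical jumper between TX1 and RX1 and the default `RX1`/`TX1` pins from the table below are assumptions.

```cpp
#include "HardwareSerial.h"

static String recv_msg;

void setup() {
  Serial.begin(115200);
  Serial1.begin(115200, SERIAL_8N1, RX1, TX1);  // default pins, see the table below
  Serial1.onReceive([]() {                      // collect everything echoed back
    while (Serial1.available()) {
      recv_msg += (char)Serial1.read();
    }
  });
  Serial1.print("Hello from Serial1");
  Serial1.flush();
  delay(100);
  Serial.println(recv_msg == "Hello from Serial1" ? "loopback OK" : "loopback FAILED");
}

void loop() {}
```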
*/ -#include -#include "HardwareSerial.h" -#include "esp_rom_gpio.h" -#include "Wire.h" - // Default pins: -// | Name | ESP32 | S2 | S3 | C3 | C6 | H2 | -// UART0 RX | SOC_RX0 | 3 | 44 | 44 | 20 | 17 | 23 | -// UART0 TX | SOC_TX0 | 1 | 43 | 43 | 21 | 16 | 24 | -// UART1 RX | RX1 | 26 | 4 | 15 | 18 | 4 | 0 | -// UART1 TX | TX1 | 27 | 5 | 16 | 19 | 5 | 1 | -// UART2 RX | RX2 | 4 | -- | 19 | -- | -- | -- | -// UART2 TX | TX2 | 25 | -- | 20 | -- | -- | -- | +// | Name | ESP32 | S2 | S3 | C3 | C6 | H2 | P4 | +// UART0 RX | SOC_RX0 | 3 | 44 | 44 | 20 | 17 | 23 | 38 | +// UART0 TX | SOC_TX0 | 1 | 43 | 43 | 21 | 16 | 24 | 37 | +// UART1 RX | RX1 | 26 | 4 | 15 | 18 | 4 | 0 | 11 | +// UART1 TX | TX1 | 27 | 5 | 16 | 19 | 5 | 1 | 10 | +// UART2 RX | RX2 | 4 | -- | 19 | -- | -- | -- | -- | +// UART2 TX | TX2 | 25 | -- | 20 | -- | -- | -- | -- | /* - * For 2 UARTS: + * For each UART: * * terminal * | ^ @@ -30,119 +25,95 @@ * report status * | * TX <---> RX - * UART1 - * - * For 3 UARTS: - * - * =====terminal====== - * ^ | ^ ^ - * | v UART0 | | - * | RX TX | - * | | - * ^ report status ^ - * | | - * | TX ---> RX | - * UART2 RX <--- TX UART1 - * + * UARTx */ -#if SOC_UART_HP_NUM == 2 -// Used for the pin swap test -#define NEW_RX1 9 -#define NEW_TX1 10 -#endif +#include +#include +#include "HardwareSerial.h" +#include "esp_rom_gpio.h" +#include "Wire.h" -// ESP32-P4 has no UART pin definition for RX2, TX2, RX3, TX3, RX4, TX4 -#ifndef RX2 -#define RX2 RX1 -#endif -#ifndef TX2 -#define TX2 RX1 -#endif +/* Utility defines */ -/* Utility global variables */ +#define TEST_UART_NUM (uart_test_configs.size()) -static String recv_msg = ""; -static int peeked_char = -1; +/* Utility classes */ -/* Utility functions */ +class UARTTestConfig { +public: + int uart_num; + HardwareSerial &serial; + int peeked_char; + int8_t default_rx_pin; + int8_t default_tx_pin; + String recv_msg; -extern int8_t uart_get_RxPin(uint8_t uart_num); -extern int8_t uart_get_TxPin(uint8_t uart_num); + UARTTestConfig(int num, HardwareSerial &serial_ref, int8_t rx_pin, int8_t tx_pin) + : uart_num(num), serial(serial_ref), peeked_char(-1), default_rx_pin(rx_pin), default_tx_pin(tx_pin), recv_msg("") {} -// This function starts all the available test UARTs -void start_serial(unsigned long baudrate = 115200) { -#if SOC_UART_HP_NUM >= 2 - Serial1.begin(baudrate); - while (!Serial1) { - delay(10); + void begin(unsigned long baudrate) { + serial.begin(baudrate, SERIAL_8N1, default_rx_pin, default_tx_pin); + while (!serial) { + delay(10); + } } -#endif -#if SOC_UART_HP_NUM >= 3 - Serial2.begin(baudrate); - while (!Serial2) { - delay(10); + void end() { + serial.end(); } -#endif -} - -// This function stops all the available test UARTs -void stop_serial(bool hard_stop = false) { -#if SOC_UART_HP_NUM >= 2 - Serial1.end(/*hard_stop*/); -#endif - -#if SOC_UART_HP_NUM >= 3 - Serial2.end(/*hard_stop*/); -#endif -} -// This function transmits a message and checks if it was received correctly -void transmit_and_check_msg(const String msg_append, bool perform_assert = true) { - delay(100); // Wait for some settings changes to take effect -#if SOC_UART_HP_NUM == 2 - Serial1.print("Hello from Serial1 (UART1) >>> via loopback >>> Serial1 (UART1) " + msg_append); - Serial1.flush(); - delay(100); - if (perform_assert) { - TEST_ASSERT_EQUAL_STRING(("Hello from Serial1 (UART1) >>> via loopback >>> Serial1 (UART1) " + msg_append).c_str(), recv_msg.c_str()); + void reset_buffers() { + recv_msg = ""; + peeked_char = -1; } -#elif SOC_UART_HP_NUM >= 3 - 
Serial1.print("Hello from Serial1 (UART1) >>> to >>> Serial2 (UART2) " + msg_append); - Serial1.flush(); - delay(100); - if (perform_assert) { - TEST_ASSERT_EQUAL_STRING(("Hello from Serial1 (UART1) >>> to >>> Serial2 (UART2) " + msg_append).c_str(), recv_msg.c_str()); + + void transmit_and_check_msg(const String &msg_append, bool perform_assert = true) { + reset_buffers(); + delay(100); + serial.print("Hello from Serial" + String(uart_num) + " " + msg_append); + serial.flush(); + delay(100); + if (perform_assert) { + TEST_ASSERT_EQUAL_STRING(("Hello from Serial" + String(uart_num) + " " + msg_append).c_str(), recv_msg.c_str()); + log_d("UART%d received message: %s\n", uart_num, recv_msg.c_str()); + } } - Serial2.print("Hello from Serial2 (UART2) >>> to >>> Serial1 (UART1) " + msg_append); - Serial2.flush(); - delay(100); - if (perform_assert) { - TEST_ASSERT_EQUAL_STRING(("Hello from Serial2 (UART2) >>> to >>> Serial1 (UART1) " + msg_append).c_str(), recv_msg.c_str()); + void onReceive() { + char c; + size_t available = serial.available(); + if (peeked_char == -1) { + peeked_char = serial.peek(); + } + while (available--) { + c = (char)serial.read(); + recv_msg += c; + } } -#else - log_d("No UARTs available for transmission"); - TEST_FAIL(); -#endif -} +}; + +/* Utility global variables */ + +[[maybe_unused]] +static const int NEW_RX1 = 9; +[[maybe_unused]] +static const int NEW_TX1 = 10; +std::vector uart_test_configs; + +/* Utility functions */ + +extern "C" int8_t uart_get_RxPin(uint8_t uart_num); +extern "C" int8_t uart_get_TxPin(uint8_t uart_num); /* Tasks */ // This task is used to send a message after a delay to test the auto baudrate detection void task_delayed_msg(void *pvParameters) { - HardwareSerial *selected_serial; - -#if SOC_UART_HP_NUM == 2 - selected_serial = &Serial; -#elif SOC_UART_HP_NUM >= 3 - selected_serial = &Serial1; -#endif - + HardwareSerial &selected_serial = uart_test_configs.size() == 1 ? 
Serial : Serial1; delay(2000); - selected_serial->println("Hello from Serial1 to detect baudrate"); - selected_serial->flush(); + selected_serial.println("Hello to detect baudrate"); + selected_serial.flush(); vTaskDelete(NULL); } @@ -150,67 +121,23 @@ void task_delayed_msg(void *pvParameters) { // This function is automatically called by unity before each test is run void setUp(void) { - start_serial(115200); -#if SOC_UART_HP_NUM == 2 - log_d("Setup internal loop-back from and back to Serial1 (UART1) TX >> Serial1 (UART1) RX"); - - Serial1.onReceive([]() { - onReceive_cb(Serial1); - }); - uart_internal_loopback(1, RX1); -#elif SOC_UART_HP_NUM >= 3 - log_d("Setup internal loop-back between Serial1 (UART1) <<--->> Serial2 (UART2)"); - - Serial1.onReceive([]() { - onReceive_cb(Serial1); - }); - Serial2.onReceive([]() { - onReceive_cb(Serial2); - }); - uart_internal_loopback(1, RX2); - uart_internal_loopback(2, RX1); -#endif + for (auto *ref : uart_test_configs) { + UARTTestConfig &config = *ref; + //log_d("Setup internal loop-back from and back to UART%d TX >> UART%d RX", config.uart_num, config.uart_num); + config.begin(115200); + config.serial.onReceive([&config]() { + config.onReceive(); + }); + uart_internal_loopback(config.uart_num, uart_get_RxPin(config.uart_num)); + } } // This function is automatically called by unity after each test is run void tearDown(void) { - stop_serial(); -} - -/* Callback functions */ - -// This is a callback function that will be activated on UART RX events -void onReceive_cb(HardwareSerial &selected_serial) { - int uart_num = -1; - char c; - - (void)uart_num; // Avoid compiler warning when debug level is set to none - - if (&selected_serial == &Serial) { - uart_num = 0; -#if SOC_UART_HP_NUM >= 2 - } else if (&selected_serial == &Serial1) { - uart_num = 1; -#endif -#if SOC_UART_HP_NUM >= 3 - } else if (&selected_serial == &Serial2) { - uart_num = 2; -#endif - } - - recv_msg = ""; - size_t available = selected_serial.available(); - - if (available != 0) { - peeked_char = selected_serial.peek(); + for (auto *ref : uart_test_configs) { + UARTTestConfig &config = *ref; + config.end(); } - - while (available--) { - c = (char)selected_serial.read(); - recv_msg += c; - } - - log_d("UART %d received message: %s\n", uart_num, recv_msg.c_str()); } /* Test functions */ @@ -219,40 +146,33 @@ void onReceive_cb(HardwareSerial &selected_serial) { void basic_transmission_test(void) { log_d("Performing basic transmission test"); - transmit_and_check_msg(""); + for (auto *ref : uart_test_configs) { + UARTTestConfig &config = *ref; + config.transmit_and_check_msg(""); + } Serial.println("Basic transmission test successful"); } // This test checks if the baudrate can be changed and if the message can be transmitted and received correctly after the change void change_baudrate_test(void) { - //Test first using the updateBaudRate method and then using the begin method - log_d("Changing baudrate to 9600"); - - //Baudrate error should be within 2% of the target baudrate - Serial1.updateBaudRate(9600); - TEST_ASSERT_UINT_WITHIN(192, 9600, Serial1.baudRate()); + for (auto *ref : uart_test_configs) { + UARTTestConfig &config = *ref; + log_d("Changing baudrate of UART%d to 9600", config.uart_num); -#if SOC_UART_HP_NUM >= 3 - Serial2.updateBaudRate(9600); - TEST_ASSERT_UINT_WITHIN(192, 9600, Serial2.baudRate()); -#endif - - log_d("Sending string using 9600 baudrate"); - transmit_and_check_msg("using 9600 baudrate"); + //Baudrate error should be within 2% of the target baudrate + 
config.serial.updateBaudRate(9600); + TEST_ASSERT_UINT_WITHIN(192, 9600, config.serial.baudRate()); - log_d("Changing baudrate back to 115200"); - start_serial(115200); + log_d("Sending string on UART%d using 9600 baudrate", config.uart_num); + config.transmit_and_check_msg("using 9600 baudrate"); - //Baudrate error should be within 2% of the target baudrate - TEST_ASSERT_UINT_WITHIN(2304, 115200, Serial1.baudRate()); + config.serial.begin(115200); + TEST_ASSERT_UINT_WITHIN(2304, 115200, config.serial.baudRate()); -#if SOC_UART_HP_NUM >= 3 - TEST_ASSERT_UINT_WITHIN(2304, 115200, Serial2.baudRate()); -#endif - - log_d("Sending string using 115200 baudrate"); - transmit_and_check_msg("using 115200 baudrate"); + log_d("Sending string on UART%d using 115200 baudrate", config.uart_num); + config.transmit_and_check_msg("using 115200 baudrate"); + } Serial.println("Change baudrate test successful"); } @@ -269,7 +189,7 @@ void resize_buffers_test(void) { ret = Serial1.setTxBufferSize(256); TEST_ASSERT_EQUAL(0, ret); - stop_serial(); + Serial1.end(); log_d("Trying to resize RX buffer while stopped."); ret = Serial1.setRxBufferSize(256); @@ -285,7 +205,12 @@ void resize_buffers_test(void) { // This test checks if the begin function can be called when the UART is already running void begin_when_running_test(void) { log_d("Trying to set up serial twice"); - start_serial(115200); + for (auto *ref : uart_test_configs) { + UARTTestConfig &config = *ref; + // Calling twice should not crash + config.begin(115200); + config.begin(115200); + } Serial.println("Begin when running test successful"); } @@ -293,9 +218,12 @@ void begin_when_running_test(void) { void end_when_stopped_test(void) { log_d("Trying to end serial twice"); - // Calling end(true) twice should not crash - stop_serial(true); - stop_serial(true); + for (auto *ref : uart_test_configs) { + UARTTestConfig &config = *ref; + // Calling twice should not crash + config.end(); + config.end(); + } Serial.println("End when stopped test successful"); } @@ -319,7 +247,7 @@ void enabled_uart_calls_test(void) { TEST_ASSERT_EQUAL(true, boolean_ret); log_d("Checking if Serial 1 is peekable while running"); - TEST_ASSERT_GREATER_OR_EQUAL(0, peeked_char); + TEST_ASSERT_GREATER_OR_EQUAL(0, uart_test_configs[0]->peeked_char); log_d("Checking if Serial 1 can read bytes while running"); integer_ret = Serial1.readBytes(test_buf, 1); @@ -355,7 +283,10 @@ void disabled_uart_calls_test(void) { int integer_ret; uint8_t test_buf[1]; - stop_serial(); + for (auto *ref : uart_test_configs) { + UARTTestConfig &config = *ref; + config.end(); + } log_d("Checking if Serial 1 can set the RX timeout when stopped"); boolean_ret = Serial1.setRxTimeout(1); @@ -423,44 +354,35 @@ void disabled_uart_calls_test(void) { // This test checks if the pins can be changed and if the message can be transmitted and received correctly after the change void change_pins_test(void) { - //stop_serial(); - log_d("Disabling UART loopback"); -#if SOC_UART_HP_NUM == 2 - esp_rom_gpio_connect_out_signal(SOC_RX0, SIG_GPIO_OUT_IDX, false, false); -#elif SOC_UART_HP_NUM >= 3 - esp_rom_gpio_connect_out_signal(RX1, SIG_GPIO_OUT_IDX, false, false); - esp_rom_gpio_connect_out_signal(RX2, SIG_GPIO_OUT_IDX, false, false); -#endif - - log_d("Swapping UART pins"); - -#if SOC_UART_HP_NUM == 2 - Serial1.setPins(NEW_RX1, NEW_TX1); - TEST_ASSERT_EQUAL(NEW_RX1, uart_get_RxPin(1)); - TEST_ASSERT_EQUAL(NEW_TX1, uart_get_TxPin(1)); -#elif SOC_UART_HP_NUM >= 3 - Serial1.setPins(RX2, TX2); - Serial2.setPins(RX1, TX1); - 
TEST_ASSERT_EQUAL(RX2, uart_get_RxPin(1)); - TEST_ASSERT_EQUAL(TX2, uart_get_TxPin(1)); - TEST_ASSERT_EQUAL(RX1, uart_get_RxPin(2)); - TEST_ASSERT_EQUAL(TX1, uart_get_TxPin(2)); -#endif - - start_serial(115200); - - log_d("Re-enabling UART loopback"); - -#if SOC_UART_HP_NUM == 2 - uart_internal_loopback(1, NEW_RX1); -#elif SOC_UART_HP_NUM >= 3 - uart_internal_loopback(1, RX1); - uart_internal_loopback(2, RX2); -#endif + for (auto *ref : uart_test_configs) { + UARTTestConfig &config = *ref; + esp_rom_gpio_connect_out_signal(config.default_rx_pin, SIG_GPIO_OUT_IDX, false, false); + } - transmit_and_check_msg("using new pins"); + log_d("Swapping UART pins and testing transmission"); + + if (TEST_UART_NUM == 1) { + UARTTestConfig &config = *uart_test_configs[0]; + config.serial.setPins(NEW_RX1, NEW_TX1); + TEST_ASSERT_EQUAL(NEW_RX1, uart_get_RxPin(config.uart_num)); + TEST_ASSERT_EQUAL(NEW_TX1, uart_get_TxPin(config.uart_num)); + + uart_internal_loopback(config.uart_num, NEW_RX1); + config.transmit_and_check_msg("using new pins"); + } else { + for (int i = 0; i < TEST_UART_NUM; i++) { + UARTTestConfig &config = *uart_test_configs[i]; + UARTTestConfig &next_uart = *uart_test_configs[(i + 1) % TEST_UART_NUM]; + config.serial.setPins(next_uart.default_rx_pin, next_uart.default_tx_pin); + TEST_ASSERT_EQUAL(uart_get_RxPin(config.uart_num), next_uart.default_rx_pin); + TEST_ASSERT_EQUAL(uart_get_TxPin(config.uart_num), next_uart.default_tx_pin); + + uart_internal_loopback(config.uart_num, next_uart.default_rx_pin); + config.transmit_and_check_msg("using new pins"); + } + } Serial.println("Change pins test successful"); } @@ -475,12 +397,15 @@ void auto_baudrate_test(void) { log_d("Stopping test serial. Using Serial2 for ESP32 and Serial1 for ESP32-S2."); -#if SOC_UART_HP_NUM == 2 - selected_serial = &Serial1; - uart_internal_loopback(0, RX1); -#elif SOC_UART_HP_NUM >= 3 - selected_serial = &Serial2; + if (TEST_UART_NUM == 1) { + selected_serial = &Serial1; + uart_internal_loopback(0, RX1); + } else { +#ifdef RX2 + selected_serial = &Serial2; + uart_internal_loopback(1, RX2); #endif + } //selected_serial->end(false); @@ -493,10 +418,10 @@ void auto_baudrate_test(void) { selected_serial->begin(0); baudrate = selected_serial->baudRate(); -#if SOC_UART_HP_NUM == 2 - Serial.end(); - Serial.begin(115200); -#endif + if (TEST_UART_NUM == 1) { + Serial.end(); + Serial.begin(115200); + } TEST_ASSERT_UINT_WITHIN(2304, 115200, baudrate); @@ -510,32 +435,23 @@ void periman_test(void) { log_d("Setting up I2C on the same pins as UART"); - Wire.begin(RX1, TX1); - -#if SOC_UART_HP_NUM >= 3 - Wire1.begin(RX2, TX2); -#endif - - recv_msg = ""; - - log_d("Trying to send message using UART with I2C enabled"); - transmit_and_check_msg("while used by I2C", false); - TEST_ASSERT_EQUAL_STRING("", recv_msg.c_str()); + for (auto *ref : uart_test_configs) { + UARTTestConfig &config = *ref; + Wire.begin(config.default_rx_pin, config.default_tx_pin); + config.recv_msg = ""; - log_d("Disabling I2C and re-enabling UART"); + log_d("Trying to send message using UART%d with I2C enabled", config.uart_num); + config.transmit_and_check_msg("while used by I2C", false); + TEST_ASSERT_EQUAL_STRING("", config.recv_msg.c_str()); - Serial1.setPins(RX1, TX1); + log_d("Disabling I2C and re-enabling UART%d", config.uart_num); -#if SOC_UART_HP_NUM >= 3 - Serial2.setPins(RX2, TX2); - uart_internal_loopback(1, RX2); - uart_internal_loopback(2, RX1); -#elif SOC_UART_HP_NUM == 2 - uart_internal_loopback(1, RX1); -#endif + 
config.serial.setPins(config.default_rx_pin, config.default_tx_pin); + uart_internal_loopback(config.uart_num, config.default_rx_pin); - log_d("Trying to send message using UART with I2C disabled"); - transmit_and_check_msg("while I2C is disabled"); + log_d("Trying to send message using UART%d with I2C disabled", config.uart_num); + config.transmit_and_check_msg("while I2C is disabled"); + } Serial.println("Peripheral manager test successful"); } @@ -551,8 +467,11 @@ void change_cpu_frequency_test(void) { Serial.updateBaudRate(115200); - log_d("Trying to send message with the new CPU frequency"); - transmit_and_check_msg("with new CPU frequency"); + for (auto *ref : uart_test_configs) { + UARTTestConfig &config = *ref; + log_d("Trying to send message with the new CPU frequency on UART%d", config.uart_num); + config.transmit_and_check_msg("with new CPU frequency"); + } log_d("Changing CPU frequency back to %dMHz", old_freq); Serial.flush(); @@ -560,8 +479,11 @@ Serial.updateBaudRate(115200); - log_d("Trying to send message with the original CPU frequency"); - transmit_and_check_msg("with the original CPU frequency"); + for (auto *ref : uart_test_configs) { + UARTTestConfig &config = *ref; + log_d("Trying to send message with the original CPU frequency on UART%d", config.uart_num); + config.transmit_and_check_msg("with the original CPU frequency"); + } Serial.println("Change CPU frequency test successful"); } @@ -573,30 +495,39 @@ void setup() { while (!Serial) { delay(10); } - log_d("SOC_UART_HP_NUM = %d", SOC_UART_HP_NUM); - - // Begin needs to be called before setting up the loopback because it creates the serial object - start_serial(115200); - -#if SOC_UART_HP_NUM == 2 - log_d("Setup internal loop-back from and back to Serial1 (UART1) TX >> Serial1 (UART1) RX"); - - Serial1.onReceive([]() { - onReceive_cb(Serial1); - }); - uart_internal_loopback(1, RX1); -#elif SOC_UART_HP_NUM >= 3 - log_d("Setup internal loop-back between Serial1 (UART1) <<--->> Serial2 (UART2)"); - - Serial1.onReceive([]() { - onReceive_cb(Serial1); - }); - Serial2.onReceive([]() { - onReceive_cb(Serial2); - }); - uart_internal_loopback(1, RX2); - uart_internal_loopback(2, RX1); + + uart_test_configs = { +#if SOC_UART_HP_NUM >= 2 && defined(RX1) && defined(TX1) + // inverting RX1<->TX1 because ESP32-P4 has a problem with loopback on RX1 :: GPIO11 <-- UART_TX SIGNAL + new UARTTestConfig(1, Serial1, TX1, RX1), +#endif +#if SOC_UART_HP_NUM >= 3 && defined(RX2) && defined(TX2) + new UARTTestConfig(2, Serial2, RX2, TX2), +#endif +#if SOC_UART_HP_NUM >= 4 && defined(RX3) && defined(TX3) + new UARTTestConfig(3, Serial3, RX3, TX3), #endif +#if SOC_UART_HP_NUM >= 5 && defined(RX4) && defined(TX4) + new UARTTestConfig(4, Serial4, RX4, TX4) +#endif + }; + + if (TEST_UART_NUM == 0) { + log_e("This test requires at least one UART besides UART0 configured"); + abort(); + } + + log_d("TEST_UART_NUM = %d", TEST_UART_NUM); + + for (auto *ref : uart_test_configs) { + UARTTestConfig &config = *ref; + config.begin(115200); + log_d("Setup internal loop-back from and back to UART%d TX >> UART%d RX", config.uart_num, config.uart_num); + config.serial.onReceive([&config]() { + config.onReceive(); + }); + uart_internal_loopback(config.uart_num, uart_get_RxPin(config.uart_num)); + } log_d("Setup done. 
Starting tests"); diff --git a/tools/add_lib.sh b/tools/add_lib.sh index 4ec73c4f7f7..9760f8114c6 100755 --- a/tools/add_lib.sh +++ b/tools/add_lib.sh @@ -1,4 +1,5 @@ #!/bin/bash + HELP="This script help to add library when using arduino-esp32 as an ESP-IDF component The script accepts up to three arguments: -n NEW: URL address to new library on GIThub (cannot be combined with -e) @@ -26,119 +27,126 @@ n_param="" # Parse the command-line arguments using getopts while getopts "he:l:n:" opt; do - case $opt in - h) - echo "$HELP" - exit 0 - ;; - e) - #e_param="$OPTARG" - e_param="${OPTARG/#~/$HOME}" - ;; - l) - #l_param="$OPTARG" - l_param="${OPTARG/#~/$HOME}" - ;; - n) - n_param=$OPTARG - ;; - \?) - echo "Invalid option: -$OPTARG" >&2 - echo $HELP - exit 1 - ;; - :) - echo "Option -$OPTARG requires an argument." >&2 - echo $HELP - exit 1 - ;; - esac + case $opt in + h) + echo "$HELP" + exit 0 + ;; + e) + #e_param="$OPTARG" + e_param="${OPTARG/#~/$HOME}" + ;; + l) + #l_param="$OPTARG" + l_param="${OPTARG/#~/$HOME}" + ;; + n) + n_param=$OPTARG + ;; + \?) + echo "Invalid option: -$OPTARG" >&2 + echo "$HELP" + exit 1 + ;; + :) + echo "Option -$OPTARG requires an argument." >&2 + echo "$HELP" + exit 1 + ;; + *) + echo "Invalid option: -$OPTARG" >&2 + echo "$HELP" + exit 1 + ;; + esac done # No parameter check if [[ -z "$e_param" ]] && [[ -z "$l_param" ]] && [[ -z "$n_param" ]]; then - echo "Error: No parameters" >&2 - echo "$HELP" - exit 1 + echo "Error: No parameters" >&2 + echo "$HELP" + exit 1 fi # Only local path check (not permitted) -if [[ -z "$e_param" ]] && [[ ! -z "$l_param" ]] && [[ -z "$n_param" ]]; then - echo "Error: -l parameter must be paired with -e or -n" >&2 - echo "$HELP" - exit 1 +if [[ -z "$e_param" ]] && [[ -n "$l_param" ]] && [[ -z "$n_param" ]]; then + echo "Error: -l parameter must be paired with -e or -n" >&2 + echo "$HELP" + exit 1 fi # Invalid combination check -if [[ ! -z $e_param ]] && [[ ! -z $n_param ]]; then - echo "ERROR: Cannot combine -n with -e" >&2 - echo "$HELP" - exit 1 +if [[ -n $e_param ]] && [[ -n $n_param ]]; then + echo "ERROR: Cannot combine -n with -e" >&2 + echo "$HELP" + exit 1 fi # Check existing lib -if [[ ! -z "$e_param" ]]; then - if [[ ! -d "${e_param/#~/$HOME}" ]]; then # this works! - echo "Error: existing library parameter - path does not exist" >&2 - exit 1 - fi +if [[ -n "$e_param" ]]; then + if [[ ! -d "${e_param/#~/$HOME}" ]]; then # this works! + echo "Error: existing library parameter - path does not exist" >&2 + exit 1 + fi fi LIBRARY="" # Only existing library was supplied -if [[ ! -z $e_param ]] && [[ -z $l_param ]] && [[ -z $n_param ]]; then - LIBRARY=$e_param +if [[ -n $e_param ]] && [[ -z $l_param ]] && [[ -z $n_param ]]; then + LIBRARY=$e_param fi # Install new lib -if [ ! -z $n_param ]; then - INSTALL_TARGET="" - if [ -z $l_param ]; then - # If local path for project is not supplied - use as INSTALL_TARGET Arduino libraries path - INSTALL_TARGET=$ARDUINO_LIBS_PATH/$(basename "$n_param") - else - INSTALL_TARGET=$l_param/components/$(basename "$n_param") - if [ ! -d "$l_param/components" ]; then - echo "Folder components does not exist yet: mkdir -p "$l_param/components"" - mkdir -p "$l_param/components" +if [ -n "$n_param" ]; then + INSTALL_TARGET="" + if [ -z "$l_param" ]; then + # If local path for project is not supplied - use as INSTALL_TARGET Arduino libraries path + INSTALL_TARGET=$ARDUINO_LIBS_PATH/$(basename "$n_param") + else + INSTALL_TARGET=$l_param/components/$(basename "$n_param") + if [ ! 
-d "$l_param/components" ]; then + echo "Folder components does not exist yet: mkdir -p \"$l_param/components\"" + mkdir -p "$l_param/components" + fi fi - fi - # clone the new lib - echo "Cloning: git clone --recursive $n_param $INSTALL_TARGET" - git clone --recursive $n_param $INSTALL_TARGET - LIBRARY=$INSTALL_TARGET + # clone the new lib + echo "Cloning: git clone --recursive $n_param $INSTALL_TARGET" + git clone --recursive "$n_param" "$INSTALL_TARGET" + LIBRARY=$INSTALL_TARGET fi # Copy existing lib to local project -if [[ ! -z $e_param ]] && [[ ! -z $l_param ]]; then - if [ ! -d "$l_param/components" ]; then - echo "Folder components does not exist yet: mkdir -p "$l_param/components"" - mkdir -p "$l_param/components" - fi - echo "Copy from $e_param to $l_param" - echo "cp -r $e_param $l_param/components/$(basename "$e_param")" - cp -r $e_param $l_param/components/$(basename "$e_param") - LIBRARY=$l_param/components/$(basename "$e_param") +if [[ -n $e_param ]] && [[ -n $l_param ]]; then + if [ ! -d "$l_param/components" ]; then + echo "Folder components does not exist yet: mkdir -p \"$l_param/components\"" + mkdir -p "$l_param/components" + fi + echo "Copy from $e_param to $l_param" + echo "cp -r $e_param $l_param/components/\"$(basename "$e_param")\"" + cp -r "$e_param" "$l_param"/components/"$(basename "$e_param")" + LIBRARY=$l_param/components/"$(basename "$e_param")" fi if [ -z "$LIBRARY" ]; then - echo "ERROR: No library path" >&2 - exit 1 + echo "ERROR: No library path" >&2 + exit 1 fi # 1. get the source list: -FILES=$(find $LIBRARY -name '*.c' -o -name '*.cpp' | xargs -I{} basename {}) +FILES=$(find "$LIBRARY" -print0 -name '*.c' -o -name '*.cpp' | xargs -0 -I{} basename {}) # Fresh start -if [ -f $LIBRARY/CMakeLists.txt ]; then - rm $LIBRARY/CMakeLists.txt - touch $LIBRARY/CMakeLists.txt +if [ -f "$LIBRARY"/CMakeLists.txt ]; then + rm "$LIBRARY"/CMakeLists.txt + touch "$LIBRARY"/CMakeLists.txt fi # Generate CMakeLists.txt -echo "idf_component_register(SRCS $(echo $FILES | sed -e 's/ /" "/g' | sed -e 's/^/"/' -e 's/$/"/')" >> $LIBRARY/CMakeLists.txt -echo " INCLUDE_DIRS \".\"" >> $LIBRARY/CMakeLists.txt -echo " REQUIRES \"arduino-esp32\"" >> $LIBRARY/CMakeLists.txt -echo " )" >> $LIBRARY/CMakeLists.txt +{ + echo "idf_component_register(SRCS $(echo "$FILES" | sed -e 's/ /" "/g' | sed -e 's/^/"/' -e 's/$/"/')" + echo " INCLUDE_DIRS \".\"" + echo " REQUIRES \"arduino-esp32\"" + echo " )" +} >> "$LIBRARY"/CMakeLists.txt diff --git a/tools/gen_esp32part.exe b/tools/gen_esp32part.exe index 51c1959ac74..5bd12c6360d 100644 Binary files a/tools/gen_esp32part.exe and b/tools/gen_esp32part.exe differ diff --git a/tools/gen_esp32part.py b/tools/gen_esp32part.py index 4ba0ee59517..ffa740a36e0 100755 --- a/tools/gen_esp32part.py +++ b/tools/gen_esp32part.py @@ -529,7 +529,7 @@ def to_binary(self): def to_csv(self, simple_formatting=False): def addr_format(a, include_sizes): if not simple_formatting and include_sizes: - for (val, suffix) in [(0x100000, "M"), (0x400, "K")]: + for val, suffix in [(0x100000, "M"), (0x400, "K")]: if a % val == 0: return "%d%s" % (a // val, suffix) return "0x%x" % a diff --git a/variants/Pcbcupid_GLYPH_C3/pins_arduino.h b/variants/Pcbcupid_GLYPH_C3/pins_arduino.h new file mode 100644 index 00000000000..653c2c48828 --- /dev/null +++ b/variants/Pcbcupid_GLYPH_C3/pins_arduino.h @@ -0,0 +1,43 @@ +#ifndef Pins_Arduino_h +#define Pins_Arduino_h + +#include + +// BUILTIN_LED can be used in new Arduino API digitalWrite() like in Blink.ino +static const uint8_t 
LED_BUILTIN = 1; +#define BUILTIN_LED LED_BUILTIN // backward compatibility +#define LED_BUILTIN LED_BUILTIN // allow testing #ifdef LED_BUILTIN + +//MSR Used in on-board battery measurement +static const uint8_t BAT_MEASURE = 0; +#define MSR BAT_MEASURE + +static const uint8_t TX = 21; +static const uint8_t RX = 20; + +static const uint8_t SDA = 4; +static const uint8_t SCL = 5; + +static const uint8_t SS = 3; +static const uint8_t MOSI = 6; +static const uint8_t MISO = 7; +static const uint8_t SCK = 10; + +static const uint8_t A0 = 0; +static const uint8_t A1 = 1; +static const uint8_t A2 = 2; +static const uint8_t A3 = 3; + +static const uint8_t D0 = 0; +static const uint8_t D1 = 1; +static const uint8_t D2 = 2; +static const uint8_t D3 = 3; +static const uint8_t D4 = 4; +static const uint8_t D5 = 5; +static const uint8_t D6 = 6; +static const uint8_t D7 = 7; +static const uint8_t D8 = 8; +static const uint8_t D9 = 9; +static const uint8_t D10 = 10; + +#endif /* Pins_Arduino_h */ diff --git a/variants/Pcbcupid_GLYPH_C6/pins_arduino.h b/variants/Pcbcupid_GLYPH_C6/pins_arduino.h new file mode 100644 index 00000000000..f06fb151244 --- /dev/null +++ b/variants/Pcbcupid_GLYPH_C6/pins_arduino.h @@ -0,0 +1,52 @@ +#ifndef Pins_Arduino_h +#define Pins_Arduino_h + +#include + +// BUILTIN_LED can be used in new Arduino API digitalWrite() like in Blink.ino +static const uint8_t LED_BUILTIN = 14; +#define BUILTIN_LED LED_BUILTIN // backward compatibility +#define LED_BUILTIN LED_BUILTIN // allow testing #ifdef LED_BUILTIN + +//MSR Used in on-board battery measurement +static const uint8_t BAT_MEASURE = 0; +#define MSR BAT_MEASURE + +static const uint8_t TX = 16; +static const uint8_t RX = 17; + +static const uint8_t SDA = 4; +static const uint8_t SCL = 5; + +static const uint8_t SS = 20; +static const uint8_t MOSI = 22; +static const uint8_t MISO = 23; +static const uint8_t SCK = 21; + +static const uint8_t A0 = 0; +static const uint8_t A1 = 1; +static const uint8_t A2 = 2; +static const uint8_t A3 = 3; + +static const uint8_t D0 = 0; +static const uint8_t D1 = 1; +static const uint8_t D2 = 2; +static const uint8_t D3 = 3; +static const uint8_t D4 = 4; +static const uint8_t D5 = 5; +static const uint8_t D6 = 6; +static const uint8_t D7 = 7; +static const uint8_t D8 = 8; +static const uint8_t D9 = 9; +static const uint8_t D14 = 14; +static const uint8_t D15 = 15; +static const uint8_t D16 = 16; +static const uint8_t D17 = 17; +static const uint8_t D18 = 18; +static const uint8_t D19 = 19; +static const uint8_t D20 = 20; +static const uint8_t D21 = 21; +static const uint8_t D22 = 22; +static const uint8_t D23 = 23; + +#endif /* Pins_Arduino_h */ diff --git a/variants/Pcbcupid_GLYPH_H2/pins_arduino.h b/variants/Pcbcupid_GLYPH_H2/pins_arduino.h new file mode 100644 index 00000000000..20a385a9817 --- /dev/null +++ b/variants/Pcbcupid_GLYPH_H2/pins_arduino.h @@ -0,0 +1,44 @@ +#ifndef Pins_Arduino_h +#define Pins_Arduino_h + +#include + +// BUILTIN_LED can be used in new Arduino API digitalWrite() like in Blink.ino +static const uint8_t LED_BUILTIN = 0; +#define BUILTIN_LED LED_BUILTIN // backward compatibility +#define LED_BUILTIN LED_BUILTIN // allow testing #ifdef LED_BUILTIN + +//MSR Used in on-board battery measurement +static const uint8_t BAT_MEASURE = 1; +#define MSR BAT_MEASURE + +static const uint8_t TX = 24; +static const uint8_t RX = 23; + +static const uint8_t SDA = 4; +static const uint8_t SCL = 5; + +static const uint8_t SS = 3; +static const uint8_t MOSI = 22; +static const uint8_t MISO = 
25; +static const uint8_t SCK = 11; + +static const uint8_t A1 = 1; +static const uint8_t A2 = 2; +static const uint8_t A3 = 3; + +static const uint8_t D0 = 0; +static const uint8_t D1 = 1; +static const uint8_t D2 = 2; +static const uint8_t D3 = 3; +static const uint8_t D4 = 4; +static const uint8_t D5 = 5; +static const uint8_t D8 = 8; +static const uint8_t D9 = 9; +static const uint8_t D10 = 10; +static const uint8_t D11 = 11; +static const uint8_t D12 = 12; +static const uint8_t D13 = 13; +static const uint8_t D14 = 14; + +#endif /* Pins_Arduino_h */
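The three Pcbcupid GLYPH variant headers above only publish pin aliases; they do not configure any peripheral by themselves. As a point of reference (not part of this diff), here is a minimal sketch of how the new constants are typically consumed: LED_BUILTIN drives the status LED and MSR/BAT_MEASURE is the battery-sense input. The 12-bit ADC resolution, 3.3 V reference, and 2:1 divider used in the conversion are illustrative assumptions only; they are not specified by these headers and depend on the actual board.

#include <Arduino.h>

void setup() {
  Serial.begin(115200);
  pinMode(LED_BUILTIN, OUTPUT);  // alias provided by the selected variant's pins_arduino.h
  analogReadResolution(12);      // 12-bit readings, 0..4095
}

void loop() {
  digitalWrite(LED_BUILTIN, HIGH);  // blink the on-board LED
  delay(500);
  digitalWrite(LED_BUILTIN, LOW);
  delay(500);

  // MSR aliases BAT_MEASURE in these variants; the divider ratio is board specific (2:1 assumed here).
  uint32_t raw = analogRead(MSR);
  float volts = (raw / 4095.0f) * 3.3f * 2.0f;
  Serial.printf("Battery sense: raw=%lu, approx %.2f V\n", (unsigned long)raw, volts);
}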