From b097ed1e7ea80d37f73e73bf20ff0d40fa894b7f Mon Sep 17 00:00:00 2001 From: SW van Heerden Date: Mon, 22 Jan 2024 08:15:41 +0200 Subject: [PATCH 1/4] chore: update change log (#6086) Description --- Updates the nextnet changelog. --- changelog-nextnet.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/changelog-nextnet.md b/changelog-nextnet.md index 00cae88b0e..da107fa33d 100644 --- a/changelog-nextnet.md +++ b/changelog-nextnet.md @@ -2,6 +2,23 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +## [1.0.0-rc.2](https://github.com/tari-project/tari/compare/v1.0.0-rc.1...v1.0.0-rc.2) (2024-01-18) + + ### Features + +* add tari address as valid string for discovering a peer ([#6075](https://github.com/tari-project/tari/issues/6075)) ([a4c5bc2](https://github.com/tari-project/tari/commit/a4c5bc2c6c08a5d09b58f13ed9acf561e55478fc)) +* make all apps non interactive ([#6049](https://github.com/tari-project/tari/issues/6049)) ([bafd7e7](https://github.com/tari-project/tari/commit/bafd7e7baadd0f8b82ca8205ec3f18342d74e92a)) +* make libtor on by default for nix builds ([#6060](https://github.com/tari-project/tari/issues/6060)) ([b5e0d06](https://github.com/tari-project/tari/commit/b5e0d0639c540177373b7faa9c2fade64581e46d)) + + ### Bug Fixes + +* fix small error in config.toml ([#6052](https://github.com/tari-project/tari/issues/6052)) ([6518a60](https://github.com/tari-project/tari/commit/6518a60dce9a4b8ace6c5cc4b1ee79045e364e0e)) +* tms validation correctly updating ([#6079](https://github.com/tari-project/tari/issues/6079)) ([34222a8](https://github.com/tari-project/tari/commit/34222a88bd1746869e67ccde9c2f7529862f3b5d)) +* wallet coinbases not validated correctly ([#6074](https://github.com/tari-project/tari/issues/6074)) ([bb66df1](https://github.com/tari-project/tari/commit/bb66df13bcf3d00082e35f7305b1fde72d4ace2a)) + + ## [1.0.0-rc.1](https://github.com/tari-project/tari/compare/v1.0.0-rc.1...v1.0.0-rc.0) (2023-12-14) From 58a131d302fd7295134c708e75a0b788205d287e Mon Sep 17 00:00:00 2001 From: Cayle Sharrock Date: Mon, 22 Jan 2024 06:15:57 +0000 Subject: [PATCH 2/4] feat: update codeowners (#6088) Marks CI/CD files as sensitive, requiring a review by a DevOps team member. Sensitive consensus code needs a lead maintainer review. Other base layer code requires a review by core developers.
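Because the new ownership rules only gate merges once the "require reviews by codeowners" branch protection setting is enabled (see the note that follows), a quick way to sanity-check the rollout is via the GitHub REST API. The sketch below is illustrative rather than part of this change: it assumes an authenticated `gh` CLI with sufficient access, and the repository slug and branch name are placeholders.

```bash
#!/usr/bin/env bash
# Sketch: sanity-check a CODEOWNERS rollout via the GitHub REST API.
# Assumptions: `gh` is authenticated with admin access to the repository,
# and REPO/BRANCH below are placeholders for the real values.
set -euo pipefail

REPO="tari-project/tari"   # assumed repository slug
BRANCH="development"       # assumed protected branch

# List any syntax or unknown-owner errors GitHub finds in the CODEOWNERS file.
gh api "repos/${REPO}/codeowners/errors" --jq '.errors'

# Confirm "require review from Code Owners" is enabled on the protected branch;
# without it, the ownership rules above do not block merges.
gh api "repos/${REPO}/branches/${BRANCH}/protection/required_pull_request_reviews" \
  --jq '.require_code_owner_reviews'
```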
** REQUIRES "require reviews by codeowners" setting in branch protection rules ** Breaking Changes --- - [x] None - [ ] Requires data directory on base node to be deleted - [ ] Requires hard fork - [ ] Other - Please specify --- CODEOWNERS | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/CODEOWNERS b/CODEOWNERS index ed8f8520e0..8adc61ce3b 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1 +1,10 @@ -@stringhandler @CjS77 @sdbondi \ No newline at end of file +# CI/CD-related files require a review by the devops team +.github/**/* @tari-project/devops +scripts/**/* @tari-project/devops +CODEOWNERS @tari-project/devops + +# Consensus code requires approvals by lead maintainers +base_layer/core/src/consensus/**/* @tari-project/lead-maintainers +base_layer/core/src/**/* @tari-project/tari-core-developers +base_layer/key_manager/src/**/* @tari-project/tari-core-developers +base_layer/wallet/src/**/* @tari-project/tari-core-developers From 8f56874f6c1507f1e0784e30915772cef88e6046 Mon Sep 17 00:00:00 2001 From: "C.Lee Taylor" <47312074+leet4tari@users.noreply.github.com> Date: Thu, 25 Jan 2024 07:59:56 +0200 Subject: [PATCH 3/4] chore(fix): ci - move s3 uploads to only on release (#6094) Description Moved S3 upload job to be run only on releases. Also add job to verify all checksums and copy into one file to be gpg signing Motivation and Context Remove none release assets from been uploaded to s3, as Windows s3 upload failed often. How Has This Been Tested? Run locally and in local fork successfully. What process can a PR reviewer use to test or verify this change? Check the binary builds for any errors. Breaking Changes With limited access to daily builds in s3, don't believe there will be a visible change. - [x] None - [ ] Requires data directory on base node to be deleted - [ ] Requires hard fork - [ ] Other - Please specify --- .github/workflows/base_node_binaries.yml | 219 ++++++++++++----------- 1 file changed, 117 insertions(+), 102 deletions(-) diff --git a/.github/workflows/base_node_binaries.yml b/.github/workflows/base_node_binaries.yml index e3a52fd72d..b0aa019e03 100644 --- a/.github/workflows/base_node_binaries.yml +++ b/.github/workflows/base_node_binaries.yml @@ -18,7 +18,7 @@ name: Build Matrix of Binaries env: TBN_FILENAME: "tari_suite" - TBN_BUNDLEID_BASE: "com.tarilabs.pkg" + TBN_BUNDLE_ID_BASE: "com.tarilabs.pkg" toolchain: nightly-2023-06-04 matrix-json-file: ".github/workflows/base_node_binaries.json" CARGO_HTTP_MULTIPLEXING: false @@ -26,9 +26,6 @@ env: CARGO: cargo # CARGO_OPTIONS: "--verbose" CARGO_OPTIONS: "--release" - # Needed for S3 as a default upload location - TARI_NETWORK_DIR: testnet - S3_DEST_OVERRIDE: "" concurrency: group: ${{ github.workflow }}-${{ github.ref }} @@ -55,7 +52,7 @@ jobs: # matrix_selection=$( jq -c '.[] | select( ."name" == "windows-x64" )' ${{ env.matrix-json-file }} ) # matrix_selection=$( jq -c '.[] | select( ."name" == "macos-arm64" )' ${{ env.matrix-json-file }} ) # - # buid select target images - build_enabled + # build select target images - build_enabled matrix_selection=$( jq -c '.[] | select( ."build_enabled" != false )' ${{ env.matrix-json-file }} ) # # Setup the json build matrix @@ -84,10 +81,12 @@ jobs: builds: name: Building ${{ matrix.builds.name }} on ${{ matrix.builds.runs-on }} needs: matrix-prep + outputs: + TARI_NETWORK_DIR: ${{ steps.set-tari-network.outputs.TARI_NETWORK_DIR }} + TARI_VERSION: ${{ steps.set-tari-vars.outputs.TARI_VERSION }} strategy: fail-fast: false matrix: ${{ 
fromJson(needs.matrix-prep.outputs.matrix) }} - runs-on: ${{ matrix.builds.runs-on }} steps: @@ -97,6 +96,7 @@ jobs: submodules: recursive - name: Declare TestNet for tags + id: set-tari-network if: ${{ startsWith(github.ref, 'refs/tags/v') }} shell: bash run: | @@ -105,13 +105,17 @@ jobs: echo ${TARI_NETWORK_DIR} echo "TARI_NETWORK=${TARI_NETWORK}" >> $GITHUB_ENV echo "TARI_NETWORK_DIR=${TARI_NETWORK_DIR}" >> $GITHUB_ENV + echo "TARI_NETWORK_DIR=${TARI_NETWORK_DIR}" >> $GITHUB_OUTPUT - name: Declare Global Variables 4 GHA ${{ github.event_name }} - id: vars + id: set-tari-vars shell: bash run: | echo "VBRANCH=${{ github.ref_name }}" >> $GITHUB_ENV echo "VSHA_SHORT=$(git rev-parse --short HEAD)" >> $GITHUB_ENV + TARI_VERSION=$(awk -F ' = ' '$1 ~ /version/ { gsub(/["]/, "", $2); printf("%s",$2) }' "$GITHUB_WORKSPACE/applications/minotari_node/Cargo.toml") + echo "TARI_VERSION=${TARI_VERSION}" >> $GITHUB_ENV + echo "TARI_VERSION=${TARI_VERSION}" >> $GITHUB_OUTPUT - name: Scheduled Destination Folder Override if: ${{ github.event_name == 'schedule' && github.event.schedule == '05 00 * * *' }} @@ -244,10 +248,7 @@ jobs: run: | mkdir -p "$GITHUB_WORKSPACE${TBN_DIST}" cd "$GITHUB_WORKSPACE${TBN_DIST}" - VERSION=$(awk -F ' = ' '$1 ~ /version/ { gsub(/["]/, "", $2); printf("%s",$2) }' "$GITHUB_WORKSPACE/applications/minotari_node/Cargo.toml") - echo "VERSION=${VERSION}" >> $GITHUB_ENV - echo "VSHA_SHORT=${VSHA_SHORT}" >> $GITHUB_ENV - BINFILE="${TBN_FILENAME}-${VERSION}-${VSHA_SHORT}-${{ matrix.builds.name }}${TBN_EXT}" + BINFILE="${TBN_FILENAME}-${TARI_VERSION}-${VSHA_SHORT}-${{ matrix.builds.name }}${TBN_EXT}" echo "BINFILE=${BINFILE}" >> $GITHUB_ENV echo "Copying files for ${BINFILE} to $(pwd)" echo "MTS_SOURCE=$(pwd)" >> $GITHUB_ENV @@ -309,56 +310,56 @@ jobs: echo "${distDirPKG}" echo "distDirPKG=${distDirPKG}" >> $GITHUB_ENV TBN_Temp=${{ env.TBN_FILENAME }} - TBN_BUNDLEID_VALID_NAME=$(echo "${TBN_Temp//_/-}") + TBN_BUNDLE_ID_VALID_NAME=$(echo "${TBN_Temp//_/-}") # Strip apple-darwin TBN_ARCH=$(echo "${${{ matrix.builds.target }}//-apple-darwin/}") pkgbuild --root /tmp/tari_testnet \ - --identifier "${{ env.TBN_BUNDLEID_BASE }}.$TBN_BUNDLEID_VALID_NAME" \ - --version "$VERSION" \ + --identifier "${{ env.TBN_BUNDLE_ID_BASE }}.${TBN_BUNDLE_ID_VALID_NAME}" \ + --version "${TARI_VERSION}" \ --install-location "/tmp/tari" \ --scripts "/tmp/tari_testnet/scripts" \ - --sign "Developer ID Installer: $MACOS_INSTALLER_ID" \ - "${distDirPKG}/${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.VERSION }}.pkg" + --sign "Developer ID Installer: ${MACOS_INSTALLER_ID}" \ + "${distDirPKG}/${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg" echo -e "Submitting to Apple...\n\n" xcrun altool --notarize-app \ - --primary-bundle-id "${{ env.TBN_BUNDLEID_BASE }}.$TBN_BUNDLEID_VALID_NAME" \ - --username "$MACOS_NOTARIZE_USERNAME" --password "$MACOS_NOTARIZE_PASSWORD" \ - --asc-provider "$MACOS_ASC_PROVIDER" \ - --file "${distDirPKG}/${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.VERSION }}.pkg" &> notarisation.result + --primary-bundle-id "${{ env.TBN_BUNDLE_ID_BASE }}.${TBN_BUNDLE_ID_VALID_NAME}" \ + --username "${MACOS_NOTARIZE_USERNAME}" --password "${MACOS_NOTARIZE_PASSWORD}" \ + --asc-provider "${MACOS_ASC_PROVIDER}" \ + --file "${distDirPKG}/${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg" &> notarisation.result requestUUID=`grep RequestUUID notarisation.result | cut -d" " -f 3` - echo $requestUUID - if [[ $requestUUID == "" ]]; then + 
echo ${requestUUID} + if [[ ${requestUUID} == "" ]]; then echo "could not upload for notarization" exit 1 else - echo "Notarization RequestUUID: $requestUUID" + echo "Notarization RequestUUID: ${requestUUID}" fi echo -e "\n\nChecking result of notarisation..." request_status="in progress" - while [[ "$request_status" == "in progress" ]]; do + while [[ "${request_status}" == "in progress" ]]; do echo -n "waiting... " sleep 10 - request_status=$(xcrun altool --notarization-info $requestUUID --username "$MACOS_NOTARIZE_USERNAME" --password "$MACOS_NOTARIZE_PASSWORD" 2>&1) - echo "$request_status" - request_status=$(echo "$request_status" | awk -F ': ' '/Status:/ { print $2; }' ) - echo "$request_status" + request_status=$(xcrun altool --notarization-info ${requestUUID} --username "${MACOS_NOTARIZE_USERNAME}" --password "${MACOS_NOTARIZE_PASSWORD}" 2>&1) + echo "${request_status}" + request_status=$(echo "${request_status}" | awk -F ': ' '/Status:/ { print $2; }' ) + echo "${request_status}" done - echo "$request_status" - if [[ $request_status != "success" ]]; then - echo "## could not notarize - $request_status - ${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.VERSION }}.pkg" + echo "${request_status}" + if [[ ${request_status} != "success" ]]; then + echo "## could not notarize - ${request_status} - ${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg" exit 1 else - echo -e "\nStapling package...${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.VERSION }}.pkg\n" - xcrun stapler staple -v "${distDirPKG}/${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.VERSION }}.pkg" + echo -e "\nStapling package...${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg\n" + xcrun stapler staple -v "${distDirPKG}/${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg" fi cd ${distDirPKG} ls -la echo "Compute pkg shasum" - ${SHARUN} "${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.VERSION }}.pkg" \ - >> "${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.VERSION }}.pkg.sha256" - cat "${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.VERSION }}.pkg.sha256" + ${SHARUN} "${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg" \ + >> "${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg.sha256" + cat "${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg.sha256" echo "Checksum verification for pkg is " - ${SHARUN} --check "${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.VERSION }}.pkg.sha256" + ${SHARUN} --check "${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg.sha256" cp -v *.pkg* ${{ env.MTS_SOURCE }} - name: Artifact upload for macOS pkg @@ -366,22 +367,22 @@ jobs: continue-on-error: true uses: actions/upload-artifact@v4 with: - name: ${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.VERSION }}.pkg - path: "${{ env.distDirPKG }}/${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.VERSION }}*.pkg*" + name: ${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg + path: "${{ env.distDirPKG }}/${{ env.TBN_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}*.pkg*" - name: Build the Windows installer shell: cmd if: startsWith(runner.os,'Windows') run: | cd buildtools - "%programfiles(x86)%\Inno Setup 6\iscc.exe" "/DMyAppVersion=${{ env.VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer" "/DMinotariSuite=${{ env.TBN_FILENAME 
}}" "/DTariSuitePath=${{ github.workspace }}${{ env.TBN_DIST }}" "windows_inno_installer.iss" + "%programfiles(x86)%\Inno Setup 6\iscc.exe" "/DMyAppVersion=${{ env.TARI_VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer" "/DMinotariSuite=${{ env.TBN_FILENAME }}" "/DTariSuitePath=${{ github.workspace }}${{ env.TBN_DIST }}" "windows_inno_installer.iss" cd Output echo "Compute archive shasum" - ${{ env.SHARUN }} "${{ env.TBN_FILENAME }}-${{ env.VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer.exe" >> "${{ env.TBN_FILENAME }}-${{ env.VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer.exe.sha256" + ${{ env.SHARUN }} "${{ env.TBN_FILENAME }}-${{ env.TARI_VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer.exe" >> "${{ env.TBN_FILENAME }}-${{ env.TARI_VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer.exe.sha256" echo "Show the shasum" - cat "${{ env.TBN_FILENAME }}-${{ env.VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer.exe.sha256" - echo "Checkum verification archive is " - ${{ env.SHARUN }} --check "${{ env.TBN_FILENAME }}-${{ env.VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer.exe.sha256" + cat "${{ env.TBN_FILENAME }}-${{ env.TARI_VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer.exe.sha256" + echo "Checksum verification archive is " + ${{ env.SHARUN }} --check "${{ env.TBN_FILENAME }}-${{ env.TARI_VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer.exe.sha256" - name: Artifact upload for Windows installer uses: actions/upload-artifact@v4 @@ -406,7 +407,7 @@ jobs: ${SHARUN} "${{ env.BINFILE }}.zip" >> "${{ env.BINFILE }}.zip.sha256" echo "Show the shasum" cat "${{ env.BINFILE }}.zip.sha256" - echo "Checkum verification archive is " + echo "Checksum verification archive is " ${SHARUN} --check "${{ env.BINFILE }}.zip.sha256" - name: Artifact upload for Archive @@ -465,76 +466,90 @@ jobs: name: ${{ env.TBN_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }} path: "${{ github.workspace }}${{ env.TBN_DIST }}/diag-utils/*.zip*" - - name: Artifact cleanup for diag-utils - continue-on-error: true - shell: bash - run: | - rm -vRf "${{ github.workspace }}${{ env.TBN_DIST }}/diag-utils/" - - - name: Artifact Windows Installer for S3 - if: startsWith(runner.os,'Windows') - continue-on-error: true - shell: bash - run: | - if [ -d "${{ github.workspace }}/buildtools/Output/" ]; then - echo "Coping Windows installer ..." - cp -v "${{ github.workspace }}/buildtools/Output/"* \ - "${{ github.workspace }}${{ env.TBN_DIST }}" - fi - - - name: Sync dist to S3 - Bash - continue-on-error: true # Don't break if s3 upload fails - if: ${{ env.AWS_SECRET_ACCESS_KEY != '' && matrix.builds.runs-on != 'self-hosted' }} - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - DEST_DIR: "${{ env.S3_DEST_OVERRIDE }}${{ env.PLATFORM_SPECIFIC_DIR }}/${{ env.TARI_NETWORK_DIR }}/" - S3CMD: "cp" - S3OPTIONS: '--recursive --exclude "*" --include "*.zip*" --include "*.pkg*" --include "*installer.exe*"' - shell: bash - run: | - echo "Starting upload ... 
${{ env.MTS_SOURCE }}" - ls -al ${{ env.MTS_SOURCE }} - - aws --version - - aws s3 ${{ env.S3CMD }} --region ${{ secrets.AWS_REGION }} \ - "${{ env.MTS_SOURCE }}" \ - s3://${{ secrets.AWS_S3_BUCKET }}/${{ env.DEST_DIR }} \ - ${{ env.S3OPTIONS }} - - if [[ "${{ github.ref }}" =~ refs\/tags\/v* ]]; then - echo "Copy tags to latest s3" - aws s3 ${{ env.S3CMD }} --region ${{ secrets.AWS_REGION }} \ - "${{ env.MTS_SOURCE }}" \ - s3://${{ secrets.AWS_S3_BUCKET }}/current/${{ env.DEST_DIR }} \ - ${{ env.S3OPTIONS }} - - aws s3 rm --region ${{ secrets.AWS_REGION }} \ - s3://${{ secrets.AWS_S3_BUCKET }}/latest/${{ env.DEST_DIR }} \ - --recursive --include "*" - - aws s3 ${{ env.S3CMD }} --region ${{ secrets.AWS_REGION }} \ - "${{ env.MTS_SOURCE }}" \ - s3://${{ secrets.AWS_S3_BUCKET }}/latest/${{ env.DEST_DIR }} \ - ${{ env.S3OPTIONS }} - fi - create-release: + if: ${{ startsWith(github.ref, 'refs/tags/v') }} runs-on: ubuntu-latest needs: builds - if: ${{ startsWith(github.ref, 'refs/tags/v') }} + env: + TARI_NETWORK_DIR: ${{ needs.builds.outputs.TARI_NETWORK_DIR }} + TARI_VERSION: ${{ needs.builds.outputs.TARI_VERSION }} + steps: - name: Download binaries uses: actions/download-artifact@v4 + with: + path: ${{ env.TBN_FILENAME }} + pattern: "${{ env.TBN_FILENAME }}*" + merge-multiple: true + + - name: Verify checksums and Prep Uploads + shell: bash + working-directory: ${{ env.TBN_FILENAME }} + run: | + # set -xo pipefail + sudo apt-get update + sudo apt-get --no-install-recommends --assume-yes install dos2unix + ls -alhtR + if [ -f "${{ env.TBN_FILENAME }}-${{ env.TARI_VERSION }}.txt.sha256-unsigned" ] ; then + rm -fv "${{ env.TBN_FILENAME }}-${{ env.TARI_VERSION }}.txt.sha256-unsigned" + fi + # Merge all sha256 files into one + find . -name "*.sha256" -type f -print | xargs cat >> "${{ env.TBN_FILENAME }}-${{ env.TARI_VERSION }}.txt.sha256-unsigned" + dos2unix --quiet "${{ env.TBN_FILENAME }}-${{ env.TARI_VERSION }}.txt.sha256-unsigned" + cat "${{ env.TBN_FILENAME }}-${{ env.TARI_VERSION }}.txt.sha256-unsigned" + sha256sum --ignore-missing --check "${{ env.TBN_FILENAME }}-${{ env.TARI_VERSION }}.txt.sha256-unsigned" + ls -alhtR - name: Create release uses: ncipollo/release-action@v1 with: - artifacts: "tari_*/**/*" + artifacts: "${{ env.TBN_FILENAME }}*/**/*" token: ${{ secrets.GITHUB_TOKEN }} prerelease: true draft: true allowUpdates: true updateOnlyUnreleased: true replacesArtifacts: true + + - name: Sync assets to S3 + continue-on-error: true # Don't break if s3 upload fails + if: ${{ env.AWS_SECRET_ACCESS_KEY != '' && matrix.builds.runs-on != 'self-hosted' }} + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3CMD: "cp" + S3OPTIONS: '--recursive --exclude "*" --include "*.sha256*" --include "*.zip*" --include "*.pkg*" --include "*installer.exe*"' + shell: bash + working-directory: ${{ env.TBN_FILENAME }} + run: | + echo "Upload processing ..." 
+ ls -alhtR + echo "Clean up" + # Bash check if file with wildcards, does not work as expected + # if [ -f ${{ env.TBN_FILENAME }}*diag-utils* ] ; then + if ls ${{ env.TBN_FILENAME }}*diag-utils* > /dev/null 2>&1 ; then + rm -fv ${{ env.TBN_FILENAME }}*diag-utils* + fi + echo "Folder setup" + if ls ${{ env.TBN_FILENAME }}*linux* > /dev/null 2>&1 ; then + mkdir -p "linux/${{ env.TARI_NETWORK_DIR }}/" + mv -v ${{ env.TBN_FILENAME }}*linux* "linux/${{ env.TARI_NETWORK_DIR }}/" + fi + if ls ${{ env.TBN_FILENAME }}*macos* > /dev/null 2>&1 ; then + mkdir -p "osx/${{ env.TARI_NETWORK_DIR }}/" + mv -v ${{ env.TBN_FILENAME }}*macos* "osx/${{ env.TARI_NETWORK_DIR }}/" + fi + if ls ${{ env.TBN_FILENAME }}*windows* > /dev/null 2>&1 ; then + mkdir -p "windows/${{ env.TARI_NETWORK_DIR }}/" + mv -v ${{ env.TBN_FILENAME }}*windows* "windows/${{ env.TARI_NETWORK_DIR }}/" + fi + ls -alhtR + aws --version + echo "ls current" + aws s3 ls --region ${{ secrets.AWS_REGION }} \ + s3://${{ secrets.AWS_S3_BUCKET }}/current/ + echo "Upload current" + aws s3 ${{ env.S3CMD }} --region ${{ secrets.AWS_REGION }} \ + . \ + s3://${{ secrets.AWS_S3_BUCKET }}/current/ \ + ${{ env.S3OPTIONS }} From 0da370f574ae1a42241da39831e3bc1764d76fee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Jan 2024 08:42:40 +0200 Subject: [PATCH 4/4] chore(deps): bump actions/cache from 3 to 4 (#6093) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 3 to 4.
Release notes

Sourced from actions/cache's releases.

  • v4.0.0 (Full Changelog: https://github.com/actions/cache/compare/v3...v4.0.0)
  • v3.3.3 (Full Changelog: https://github.com/actions/cache/compare/v3...v3.3.3)
  • v3.3.2 (Full Changelog: https://github.com/actions/cache/compare/v3...v3.3.2)
  • v3.3.1 (Full Changelog: https://github.com/actions/cache/compare/v3...v3.3.1)
  • v3.3.0

... (truncated)

Changelog

Sourced from actions/cache's changelog.

Releases

3.0.0

  • Updated minimum runner version support from node 12 -> node 16

3.0.1

  • Added support for caching from GHES 3.5.
  • Fixed download issue for files > 2GB during restore.

3.0.2

  • Added support for dynamic cache size cap on GHES.

3.0.3

  • Fixed avoiding empty cache save when no files are available for caching. (issue)

3.0.4

  • Fixed tar creation error while trying to create tar with path as ~/ home folder on ubuntu-latest. (issue)

3.0.5

  • Removed error handling by consuming actions/cache 3.0 toolkit, Now cache server error handling will be done by toolkit. (PR)

3.0.6

  • Fixed #809 - zstd -d: no such file or directory error
  • Fixed #833 - cache doesn't work with github workspace directory

3.0.7

  • Fixed #810 - download stuck issue. A new timeout is introduced in the download process to abort the download if it gets stuck and doesn't finish within an hour.

3.0.8

  • Fix zstd not working for windows on gnu tar in issues #888 and #891.
  • Allowing users to provide a custom timeout as input for aborting download of a cache segment using an environment variable SEGMENT_DOWNLOAD_TIMEOUT_MINS. Default is 60 minutes.

3.0.9

  • Enhanced the warning message for cache unavailability in case of GHES.

3.0.10

  • Fix a bug with sorting inputs.
  • Update definition for restore-keys in README.md

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=3&new-version=4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c881a4febe..c8772a340f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -45,7 +45,7 @@ jobs: # Without restore keys, we lose the ability to get partial matches on caches, and end # up with too many cache misses. # Use a "small" suffix to use the build caches where possible, but build caches won't use this - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.cargo/registry/index @@ -85,7 +85,7 @@ jobs: # Rust-cache disables a key feature of actions/cache: restoreKeys. # Without restore keys, we lose the ability to get partial matches on caches, and end # up with too many cache misses. - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.cargo/registry/index
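After a bump like this, a quick follow-up is to confirm that no workflow still pins the old major version. The snippet below is a sketch, not part of the patch, run from the repository root (the workflows directory is the one touched by the diff above):

```bash
#!/usr/bin/env bash
# Sketch: report any workflow files still referencing actions/cache@v3.
if grep -rn "actions/cache@v3" .github/workflows/; then
  echo "Stale actions/cache@v3 references found above; bump them to v4."
else
  echo "No remaining actions/cache@v3 references found."
fi
```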