diff --git a/Build.cmd b/Build.cmd
index 2b5bba51f4..c3f2b30fda 100644
--- a/Build.cmd
+++ b/Build.cmd
@@ -1,3 +1,3 @@
@echo off
-call %~dp0eng\Build.cmd -restore -build -build-native %*
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0eng\build.ps1""" -restore -build -native %*"
exit /b %ErrorLevel%
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 7a9adc4dc4..df05370f0d 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -455,10 +455,7 @@ endif(WIN32)
# CLR_ADDITIONAL_LINKER_FLAGS - used for passing additional arguments to linker
# CLR_ADDITIONAL_COMPILER_OPTIONS - used for passing additional arguments to compiler
-#
-# For example:
-# ./build-native.sh cmakeargs "-DCLR_ADDITIONAL_COMPILER_OPTIONS=<...>" cmakeargs "-DCLR_ADDITIONAL_LINKER_FLAGS=<...>"
-#
+
if(CLR_CMAKE_PLATFORM_UNIX)
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CLR_ADDITIONAL_LINKER_FLAGS}")
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CLR_ADDITIONAL_LINKER_FLAGS}" )
diff --git a/NuGet.config b/NuGet.config
index 04240c3996..ce7270fafd 100644
--- a/NuGet.config
+++ b/NuGet.config
@@ -6,6 +6,7 @@
+
diff --git a/Restore.cmd b/Restore.cmd
index cb0cb90b11..3de9ae405f 100644
--- a/Restore.cmd
+++ b/Restore.cmd
@@ -1,3 +1,3 @@
@echo off
-call %~dp0eng\Build.cmd -restore %*
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0eng\build.ps1""" -restore %*"
exit /b %ErrorLevel%
diff --git a/Test.cmd b/Test.cmd
index ef857c81d3..49088cc7c2 100644
--- a/Test.cmd
+++ b/Test.cmd
@@ -1,3 +1,3 @@
@echo off
-call %~dp0eng\Build.cmd -test %*
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0eng\build.ps1""" -test %*"
exit /b %ErrorLevel%
diff --git a/build.sh b/build.sh
index 3bbc31edfb..3177ba812a 100755
--- a/build.sh
+++ b/build.sh
@@ -13,4 +13,4 @@ while [[ -h $source ]]; do
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
-"$scriptroot/eng/build.sh" --restore --build --build-native $@
+"$scriptroot/eng/build.sh" --restore --build --native $@
diff --git a/eng/Build-Native.cmd b/eng/Build-Native.cmd
index 52f620f1d4..e8f4082386 100644
--- a/eng/Build-Native.cmd
+++ b/eng/Build-Native.cmd
@@ -22,24 +22,6 @@ if defined VS160COMNTOOLS (
set __VSVersion=vs2017
)
-:: Work around Jenkins CI + msbuild problem: Jenkins sometimes creates very large environment
-:: variables, and msbuild can't handle environment blocks with such large variables. So clear
-:: out the variables that might be too large.
-set ghprbCommentBody=
-
-:: Note that the msbuild project files (specifically, dir.proj) will use the following variables, if set:
-:: __BuildArch -- default: x64
-:: __BuildType -- default: Debug
-:: __BuildOS -- default: Windows_NT
-:: __ProjectDir -- default: directory of the dir.props file
-:: __SourceDir -- default: %__ProjectDir%\src\
-:: __RootBinDir -- default: %__ProjectDir%\artifacts\
-:: __IntermediatesDir -- default: %__RootBinDir%\obj\%__BuildOS%.%__BuildArch.%__BuildType%\
-:: __BinDir -- default: %__RootBinDir%\bin\%__BuildOS%.%__BuildArch.%__BuildType%\
-:: __LogDir -- default: %__RootBinDir%\log\%__BuildOS%.%__BuildArch.%__BuildType%\
-::
-:: Thus, these variables are not simply internal to this script!
-
:: Set the default arguments for build
set __BuildArch=x64
@@ -47,12 +29,9 @@ if /i "%PROCESSOR_ARCHITECTURE%" == "amd64" set __BuildArch=x64
if /i "%PROCESSOR_ARCHITECTURE%" == "x86" set __BuildArch=x86
set __BuildType=Debug
set __BuildOS=Windows_NT
-set __Build=0
-set __Test=0
+set __Build=1
set __CI=0
-set __DailyTest=
set __Verbosity=minimal
-set __TestArgs=
set __BuildCrossArch=0
set __CrossArch=
@@ -63,7 +42,7 @@ if %__ProjectDir:~-1%==\ set "__ProjectDir=%__ProjectDir:~0,-1%"
set "__ProjectDir=%__ProjectDir%\.."
set "__SourceDir=%__ProjectDir%\src"
-:: __UnprocessedBuildArgs are args that we pass to msbuild (e.g. /p:__BuildArch=x64)
+:: __UnprocessedBuildArgs are args that we pass to msbuild (e.g. /p:OfficialBuildId=xxxxxx)
set "__args=%*"
set processedArgs=
set __UnprocessedBuildArgs=
@@ -76,18 +55,16 @@ if /i "%1" == "-h" goto Usage
if /i "%1" == "-help" goto Usage
if /i "%1" == "--help" goto Usage
-if /i "%1" == "-build-native" (set __Build=1&set processedArgs=!processedArgs! %1&shift&goto Arg_Loop)
-if /i "%1" == "-test" (set __Test=1&set processedArgs=!processedArgs! %1&shift&goto Arg_Loop)
-if /i "%1" == "-daily-test" (set __DailyTest=-DailyTest&set processedArgs=!processedArgs! %1&shift&goto Arg_Loop)
if /i "%1" == "-configuration" (set __BuildType=%2&set processedArgs=!processedArgs! %1 %2&shift&shift&goto Arg_Loop)
if /i "%1" == "-architecture" (set __BuildArch=%2&set processedArgs=!processedArgs! %1 %2&shift&shift&goto Arg_Loop)
if /i "%1" == "-verbosity" (set __Verbosity=%2&set processedArgs=!processedArgs! %1 %2&shift&shift&goto Arg_Loop)
-:: These options are passed on to the common build script when testing
-if /i "%1" == "-ci" (set __CI=1&set __TestArgs=!__TestArgs! %1&set processedArgs=!processedArgs! %1&shift&goto Arg_Loop)
-if /i "%1" == "-solution" (set __TestArgs=!__TestArgs! %1 %2&set processedArgs=!processedArgs! %1&shift&shift&goto Arg_Loop)
+if /i "%1" == "-ci" (set __CI=1&set processedArgs=!processedArgs! %1&shift&goto Arg_Loop)
+
:: These options are ignored for a native build
+if /i "%1" == "-clean" (set processedArgs=!processedArgs! %1&shift&goto Arg_Loop)
if /i "%1" == "-build" (set processedArgs=!processedArgs! %1&shift&goto Arg_Loop)
if /i "%1" == "-rebuild" (set processedArgs=!processedArgs! %1&shift&goto Arg_Loop)
+if /i "%1" == "-test" (set processedArgs=!processedArgs! %1&shift&goto Arg_Loop)
if /i "%1" == "-sign" (set processedArgs=!processedArgs! %1&shift&goto Arg_Loop)
if /i "%1" == "-restore" (set processedArgs=!processedArgs! %1&shift&goto Arg_Loop)
if /i "%1" == "-pack" (set processedArgs=!processedArgs! %1&shift&goto Arg_Loop)
@@ -332,8 +309,8 @@ REM Copy the native SOS binaries to where these tools expect for testing
set "__dotnet_sos=%__RootBinDir%\bin\dotnet-sos\%__BuildType%\netcoreapp2.1\publish\win-%__BuildArch%"
set "__dotnet_dump=%__RootBinDir%\bin\dotnet-dump\%__BuildType%\netcoreapp2.1\publish\win-%__BuildArch%"
-xcopy /y /q /i /s %__BinDir% %__dotnet_sos%
-xcopy /y /q /i /s %__BinDir% %__dotnet_dump%
+xcopy /y /q /i %__BinDir% %__dotnet_sos%
+xcopy /y /q /i %__BinDir% %__dotnet_dump%
REM =========================================================================================
REM ===
@@ -344,19 +321,6 @@ REM ============================================================================
echo %__MsgPrefix%Repo successfully built. Finished at %TIME%
echo %__MsgPrefix%Product binaries are available at !__BinDir!
-if /i %__BuildCrossArch% EQU 1 goto Done
-
-:: Test components
-if %__Test% EQU 1 (
- :: Install the other versions of .NET Core runtime we are going to test on
- powershell -ExecutionPolicy ByPass -NoProfile -command "& """%__ProjectDir%\eng\install-test-runtimes.ps1""" -DotNetDir %__ProjectDir%\.dotnet -TempDir %__IntermediatesDir% -BuildArch %__BuildArch%" %__DailyTest%
-
- :: Run the xunit tests
- powershell -ExecutionPolicy ByPass -NoProfile -command "& """%__ProjectDir%\eng\common\Build.ps1""" -test -configuration %__BuildType% -verbosity %__Verbosity% %__TestArgs%"
- exit /b !ERRORLEVEL!
-)
-
-:Done
exit /b 0
REM =========================================================================================
@@ -375,9 +339,6 @@ echo.
echo All arguments are optional. The options are:
echo.
echo.-? -h -help --help: view this message.
-echo -build-native - build native components
-echo -test - test components
-echo -daily-test - test components for daily build job
echo -architecture
echo -configuration
echo -verbosity
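
The native build script above now exposes only a slim switch set (-architecture, -configuration, -verbosity, -ci) and treats the managed-build flags as no-ops. A minimal sketch of calling it directly from a PowerShell prompt at the repository root, with illustrative values (the supported path is eng\build.ps1 -native, which wraps this call):

    # Illustrative direct invocation of the native build; values are examples only.
    & .\eng\Build-Native.cmd -architecture x64 -configuration Release -verbosity minimal
    if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE }
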
diff --git a/eng/Build.cmd b/eng/Build.cmd
deleted file mode 100644
index ae3e464daa..0000000000
--- a/eng/Build.cmd
+++ /dev/null
@@ -1,17 +0,0 @@
-@echo off
-setlocal
-
-:: remove the -test and -build-native options and pass it to build-native.cmd
-set __args="%*"
-set __args=%__args:-build-native=%
-set __args=%__args:-daily-test=%
-set __args=%__args:-test=%
-if %__args% == "" set __args=
-
-:: build managed components
-powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0common\Build.ps1""" %__args%"
-if NOT '%ERRORLEVEL%' == '0' (exit /b %ERRORLEVEL%)
-
-:: build native componments and test managed/native
-call %~dp0build-native.cmd %*
-exit /b %ERRORLEVEL%
diff --git a/eng/CIBuild.cmd b/eng/CIBuild.cmd
index 6ed2ac8415..6d4c7c988a 100644
--- a/eng/CIBuild.cmd
+++ b/eng/CIBuild.cmd
@@ -1,3 +1,3 @@
@echo off
-call %~dp0Build.cmd -restore -build -build-native -test -publish -ci %*
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0build.ps1""" -restore -build -native -test -publish -ci %*"
exit /b %ErrorLevel%
diff --git a/eng/InstallRuntimes.proj b/eng/InstallRuntimes.proj
new file mode 100644
index 0000000000..2d39fd323d
--- /dev/null
+++ b/eng/InstallRuntimes.proj
@@ -0,0 +1,109 @@
+
+
+
+
+ false
+ $(Platform)
+ x64
+ -architecture $(BuildArch)
+ $(DotNetRoot)Debugger.Tests.Versions.txt
+ $(DotNetRoot)x86\Debugger.Tests.Versions.txt
+
+
+
+
+
+ powershell -NonInteractive -ExecutionPolicy ByPass -NoProfile -command
+ '$(RepositoryEngineeringDir)common\dotnet-install.ps1'
+
+
+
+
+ $(RepositoryEngineeringDir)common/dotnet-install.sh
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ $(DailyTest)
+ $(MicrosoftNETCoreApp21Version)
+ $(MicrosoftAspNetCoreApp21Version)
+ $(MicrosoftNETCoreApp30Version)
+ $(MicrosoftAspNetCoreApp30Version)
+ $(MicrosoftNETCoreApp31Version)
+ $(MicrosoftAspNetCoreApp31Version)
+ $(MicrosoftNETCoreAppVersion)
+ $(MicrosoftAspNetCoreAppRefVersion)
+
+]]>
+
+
+
+
+
+
+
+
+
+
+
diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
index 7d7c1f7fdc..3e728283f5 100644
--- a/eng/Version.Details.xml
+++ b/eng/Version.Details.xml
@@ -10,9 +10,9 @@
-
+
https://github.com/dotnet/arcade
- 4953cdf6c956adf69a0050c4df503048aed6b271
+ 71ce4c736b882e6112b395a0e92313be5dcb4328
https://github.com/dotnet/core-setup
diff --git a/eng/Versions.props b/eng/Versions.props
index 886f4afbc8..41e12987eb 100644
--- a/eng/Versions.props
+++ b/eng/Versions.props
@@ -6,11 +6,22 @@
3.0.0
true
true
+
+
+
+ 3.0.100
+
+ 2.1.12
+ $(MicrosoftNETCoreApp21Version)
+ 3.0.0
+ $(MicrosoftNETCoreApp30Version)
+ 3.1.0
+ $(MicrosoftNETCoreApp31Version)
5.0.0-alpha.1.19564.1
5.0.0-alpha1.19605.16
-
+
false
true
true
diff --git a/eng/build-native.sh b/eng/build-native.sh
deleted file mode 100755
index 5035a8ef7d..0000000000
--- a/eng/build-native.sh
+++ /dev/null
@@ -1,485 +0,0 @@
-#!/usr/bin/env bash
-# Copyright (c) .NET Foundation and contributors. All rights reserved.
-# Licensed under the MIT license. See LICENSE file in the project root for full license information.
-
-# Obtain the location of the bash script to figure out where the root of the repo is.
-source="${BASH_SOURCE[0]}"
-
-# Resolve $source until the file is no longer a symlink
-while [[ -h "$source" ]]; do
- scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
- source="$(readlink "$source")"
- # if $source was a relative symlink, we need to resolve it relative to the path where the
- # symlink file was located
- [[ $source != /* ]] && source="$scriptroot/$source"
-done
-__ProjectRoot="$( cd -P "$( dirname "$source" )/.." && pwd )"
-
-__BuildOS=Linux
-__HostOS=Linux
-__BuildArch=x64
-__HostArch=x64
-__BuildType=Debug
-__PortableBuild=1
-__ExtraCmakeArgs=""
-__ClangMajorVersion=0
-__ClangMinorVersion=0
-__CrossBuild=false
-__NumProc=1
-__Build=false
-__Test=false
-__DailyTest=
-__CI=false
-__Verbosity=minimal
-__TestArgs=
-__UnprocessedBuildArgs=
-__Alpine=false
-
-usage()
-{
- echo "Usage: $0 [options]"
- echo "--build-native - build native components"
- echo "--test - test native components"
- echo "--daily-test - test native components for daily build job"
- echo "--architecture "
- echo "--configuration "
- echo "--rootfs "
- echo "--stripsymbols - strip symbols into .dbg files"
- echo "--clangx.y - optional argument to build using clang version x.y"
- echo "--ci - CI lab build"
- echo "--verbosity "
- echo "--help - this help message"
- exit 1
-}
-
-# args:
-# input - $1
-to_lowercase() {
- #eval $invocation
-
- echo "$1" | tr '[:upper:]' '[:lower:]'
- return 0
-}
-
-# Argument types supported by this script:
-#
-# Build architecture - valid values are: x64, x86, arm, armel, arm64
-# Build Type - valid values are: debug, release
-#
-# Set the default arguments for build
-
-# Use uname to determine what the CPU is.
-CPUName=$(uname -p)
-# Some Linux platforms report unknown for platform, but the arch for machine.
-if [ "$CPUName" == "unknown" ]; then
- CPUName=$(uname -m)
-fi
-
-case $CPUName in
- i686)
- echo "Unsupported CPU $CPUName detected, build might not succeed!"
- __BuildArch=x86
- __HostArch=x86
- ;;
-
- x86_64)
- __BuildArch=x64
- __HostArch=x64
- ;;
-
- armv7l)
- echo "Unsupported CPU $CPUName detected, build might not succeed!"
- __BuildArch=arm
- __HostArch=arm
- ;;
-
- aarch64)
- __BuildArch=arm64
- __HostArch=arm64
- ;;
-
- *)
- echo "Unknown CPU $CPUName detected, configuring as if for x64"
- __BuildArch=x64
- __HostArch=x64
- ;;
-esac
-
-# Use uname to determine what the OS is.
-OSName=$(uname -s)
-case $OSName in
- Linux)
- __BuildOS=Linux
- __HostOS=Linux
- ;;
-
- Darwin)
- __BuildOS=OSX
- __HostOS=OSX
- ;;
-
- FreeBSD)
- __BuildOS=FreeBSD
- __HostOS=FreeBSD
- ;;
-
- OpenBSD)
- __BuildOS=OpenBSD
- __HostOS=OpenBSD
- ;;
-
- NetBSD)
- __BuildOS=NetBSD
- __HostOS=NetBSD
- ;;
-
- SunOS)
- __BuildOS=SunOS
- __HostOS=SunOS
- ;;
-
- *)
- echo "Unsupported OS $OSName detected, configuring as if for Linux"
- __BuildOS=Linux
- __HostOS=Linux
- ;;
-esac
-
-while :; do
- if [ $# -le 0 ]; then
- break
- fi
-
- lowerI="$(to_lowercase "$1")"
- case $lowerI in
- -\?|-h|--help)
- usage
- exit 1
- ;;
-
- --build-native)
- __Build=true
- ;;
-
- # Passed to common build script when testing
- --test)
- __Test=true
- ;;
-
- --daily-test)
- __DailyTest="--daily-test"
- ;;
-
- --ci)
- __CI=true
- __TestArgs="$__TestArgs $1"
- ;;
-
- --projects)
- __TestArgs="$__TestArgs $1 $2"
- shift
- ;;
-
- --verbosity)
- __Verbosity=$2
- __TestArgs="$__TestArgs $1 $2"
- shift
- ;;
-
- --configuration)
- __BuildType="$(to_lowercase "$2")"
- shift
- ;;
-
- --architecture)
- __BuildArch="$(to_lowercase "$2")"
- shift
- ;;
-
- --rootfs)
- export ROOTFS_DIR="$2"
- shift
- ;;
-
- --portablebuild=false)
- __PortableBuild=0
- ;;
-
- --stripsymbols)
- __ExtraCmakeArgs="$__ExtraCmakeArgs -DSTRIP_SYMBOLS=true"
- ;;
-
- --clang3.5)
- __ClangMajorVersion=3
- __ClangMinorVersion=5
- ;;
-
- --clang3.6)
- __ClangMajorVersion=3
- __ClangMinorVersion=6
- ;;
-
- --clang3.7)
- __ClangMajorVersion=3
- __ClangMinorVersion=7
- ;;
-
- --clang3.8)
- __ClangMajorVersion=3
- __ClangMinorVersion=8
- ;;
-
- --clang3.9)
- __ClangMajorVersion=3
- __ClangMinorVersion=9
- ;;
-
- --clang4.0)
- __ClangMajorVersion=4
- __ClangMinorVersion=0
- ;;
-
- --clang5.0)
- __ClangMajorVersion=5
- __ClangMinorVersion=0
- ;;
-
- # Ignored for a native build
- --build|--rebuild|--sign|--restore|--pack|--publish|--preparemachine)
- ;;
-
- *)
- __UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
- ;;
- esac
-
- shift
-done
-
-if [ "$__BuildType" == "release" ]; then
- __BuildType=Release
-fi
-if [ "$__BuildType" == "debug" ]; then
- __BuildType=Debug
-fi
-
-# Needs to be set for generate version source file/msbuild
-if [[ -z $NUGET_PACKAGES ]]; then
- if [[ $__CI == true ]]; then
- export NUGET_PACKAGES="$__ProjectRoot/.packages"
- else
- export NUGET_PACKAGES="$HOME/.nuget/packages"
- fi
-fi
-
-echo $NUGET_PACKAGES
-
-__RootBinDir=$__ProjectRoot/artifacts
-__BinDir=$__RootBinDir/bin/$__BuildOS.$__BuildArch.$__BuildType
-__LogDir=$__RootBinDir/log/$__BuildOS.$__BuildArch.$__BuildType
-__IntermediatesDir=$__RootBinDir/obj/$__BuildOS.$__BuildArch.$__BuildType
-__ResultsDir=$__RootBinDir/TestResults/$__BuildType
-__PackagesBinDir=$__RootBinDir/packages/$__BuildType/Shipping
-__ExtraCmakeArgs="$__ExtraCmakeArgs -DCLR_MANAGED_BINARY_DIR=$__RootBinDir/bin -DCLR_BUILD_TYPE=$__BuildType"
-__DotNetCli=$__ProjectRoot/.dotnet/dotnet
-
-if [ ! -e $__DotNetCli ]; then
- echo "dotnet cli not installed $__DotNetCli"
- exit 1
-fi
-
-# Specify path to be set for CMAKE_INSTALL_PREFIX.
-# This is where all built native libraries will copied to.
-export __CMakeBinDir="$__BinDir"
-
-# Set default clang version
-if [[ $__ClangMajorVersion == 0 && $__ClangMinorVersion == 0 ]]; then
- if [[ "$__BuildArch" == "arm" || "$__BuildArch" == "armel" ]]; then
- __ClangMajorVersion=5
- __ClangMinorVersion=0
- else
- __ClangMajorVersion=3
- __ClangMinorVersion=9
- fi
-fi
-
-if [[ "$__BuildArch" == "armel" ]]; then
- # Armel cross build is Tizen specific and does not support Portable RID build
- __PortableBuild=0
-fi
-
-# Configure environment if we are doing a cross compile.
-if [ "${__BuildArch}" != "${__HostArch}" ]; then
- __CrossBuild=true
- export CROSSCOMPILE=1
- if ! [[ -n "$ROOTFS_DIR" ]]; then
- echo "ERROR: ROOTFS_DIR not set for cross build"
- exit 1
- fi
- echo "ROOTFS_DIR: $ROOTFS_DIR"
-fi
-
-mkdir -p "$__IntermediatesDir"
-mkdir -p "$__LogDir"
-mkdir -p "$__CMakeBinDir"
-
-build_native()
-{
- platformArch="$1"
- intermediatesForBuild="$2"
- extraCmakeArguments="$3"
-
- # All set to commence the build
- echo "Commencing $__DistroRid build for $__BuildOS.$__BuildArch.$__BuildType in $intermediatesForBuild"
-
- generator=""
- buildFile="Makefile"
- buildTool="make"
- scriptDir="$__ProjectRoot/eng"
-
- pushd "$intermediatesForBuild"
- echo "Invoking \"$scriptDir/gen-buildsys-clang.sh\" \"$__ProjectRoot\" $__ClangMajorVersion \"$__ClangMinorVersion\" $platformArch "$scriptDir" $__BuildType $generator $extraCmakeArguments"
- "$scriptDir/gen-buildsys-clang.sh" "$__ProjectRoot" $__ClangMajorVersion "$__ClangMinorVersion" $platformArch "$scriptDir" $__BuildType $generator "$extraCmakeArguments"
- popd
-
- if [ ! -f "$intermediatesForBuild/$buildFile" ]; then
- echo "Failed to generate build project!"
- exit 1
- fi
-
- # Check that the makefiles were created.
- pushd "$intermediatesForBuild"
-
- echo "Executing $buildTool install -j $__NumProc"
-
- $buildTool install -j $__NumProc | tee $__LogDir/make.log
- if [ $? != 0 ]; then
- echo "Failed to build."
- exit 1
- fi
-
- popd
-}
-
-initTargetDistroRid()
-{
- source "$__ProjectRoot/eng/init-distro-rid.sh"
-
- local passedRootfsDir=""
-
- # Only pass ROOTFS_DIR if cross is specified.
- if [ $__CrossBuild == true ]; then
- passedRootfsDir=${ROOTFS_DIR}
- fi
-
- initDistroRidGlobal ${__BuildOS} ${__BuildArch} ${__PortableBuild} ${passedRootfsDir}
-}
-
-# Init the target distro name
-initTargetDistroRid
-
-
-echo "RID: $__DistroRid"
-
-if [ "$__HostOS" == "OSX" ]; then
- export LLDB_H=$__ProjectRoot/src/SOS/lldbplugin/swift-4.0
- export LLDB_LIB=/Applications/Xcode.app/Contents/SharedFrameworks/LLDB.framework/LLDB
- export LLDB_PATH=/Applications/Xcode.app/Contents/Developer/usr/bin/lldb
-
- export MACOSX_DEPLOYMENT_TARGET=10.12
-
- # If Xcode 9.2 exists (like on the CI/build machines), use that. Xcode 9.3 or
- # greater (swift 4.1 lldb) doesn't work that well (seg faults on exit).
- if [ -f "/Applications/Xcode_9.2.app/Contents/Developer/usr/bin/lldb" ]; then
- if [ -f "/Applications/Xcode_9.2.app/Contents/SharedFrameworks/LLDB.framework/LLDB" ]; then
- export LLDB_PATH=/Applications/Xcode_9.2.app/Contents/Developer/usr/bin/lldb
- export LLDB_LIB=/Applications/Xcode_9.2.app/Contents/SharedFrameworks/LLDB.framework/LLDB
- fi
- fi
-
- if [ ! -f $LLDB_LIB ]; then
- echo "Cannot find the lldb library. Try installing Xcode."
- exit 1
- fi
-
- # Workaround bad python version in /usr/local/bin/python2.7 on lab machines
- export PATH=/usr/bin:$PATH
- which python
- python --version
-fi
-
-# Build native components
-if [ $__Build == true ]; then
- if [[ $__CI == true ]]; then
- echo "Generating Version Source File"
- __GenerateVersionRestoreLog="$__LogDir/GenerateVersionRestore.binlog"
- $__DotNetCli msbuild $__ProjectRoot/eng/CreateVersionFile.csproj /v:$__Verbosity /bl:$__GenerateVersionRestoreLog /t:Restore /p:Configuration="$__BuildType" /p:Platform="$__BuildArch" $__UnprocessedBuildArgs
-
- __GenerateVersionLog="$__LogDir/GenerateVersion.binlog"
- $__DotNetCli msbuild $__ProjectRoot/eng/CreateVersionFile.csproj /v:$__Verbosity /bl:$__GenerateVersionLog /t:GenerateVersionFiles /p:GenerateVersionSourceFile=true /p:NativeVersionSourceFile="$__IntermediatesDir/version.cpp" /p:Configuration="$__BuildType" /p:Platform="$__BuildArch" $__UnprocessedBuildArgs
- if [ $? != 0 ]; then
- echo "Generating Version Source File FAILED"
- exit 1
- fi
- else
- echo "Generating Empty Version Source File"
- echo "" > "$__IntermediatesDir/version.cpp"
- fi
-
- build_native "$__BuildArch" "$__IntermediatesDir" "$__ExtraCmakeArgs"
-fi
-
-if [[ $__Build == true || $__Test == true ]]; then
- # Copy the native SOS binaries to where these tools expect for testing
- __dotnet_sos=$__RootBinDir/bin/dotnet-sos/$__BuildType/netcoreapp2.1/publish/$__DistroRid
- __dotnet_dump=$__RootBinDir/bin/dotnet-dump/$__BuildType/netcoreapp2.1/publish/$__DistroRid
-
- mkdir -p "$__dotnet_sos"
- mkdir -p "$__dotnet_dump"
-
- cp "$__BinDir"/* "$__dotnet_sos"
- echo "Copied SOS to $__dotnet_sos"
-
- cp "$__BinDir"/* "$__dotnet_dump"
- echo "Copied SOS to $__dotnet_dump"
-fi
-
-# Run SOS/lldbplugin tests
-if [ $__Test == true ]; then
- if [ $__CrossBuild != true ]; then
-
- # Install the other versions of .NET Core runtime we are going to test on
- "$__ProjectRoot/eng/install-test-runtimes.sh" --dotnet-directory "$__ProjectRoot/.dotnet" --temp-directory "$__IntermediatesDir" --architecture "$__BuildArch" $__DailyTest
-
- if [ "$LLDB_PATH" == "" ]; then
- export LLDB_PATH="$(which lldb-3.9.1 2> /dev/null)"
- if [ "$LLDB_PATH" == "" ]; then
- export LLDB_PATH="$(which lldb-3.9 2> /dev/null)"
- if [ "$LLDB_PATH" == "" ]; then
- export LLDB_PATH="$(which lldb-4.0 2> /dev/null)"
- if [ "$LLDB_PATH" == "" ]; then
- export LLDB_PATH="$(which lldb-5.0 2> /dev/null)"
- if [ "$LLDB_PATH" == "" ]; then
- export LLDB_PATH="$(which lldb 2> /dev/null)"
- fi
- fi
- fi
- fi
- fi
-
- if [ "$GDB_PATH" == "" ]; then
- export GDB_PATH="$(which gdb 2> /dev/null)"
- fi
-
- echo "lldb: '$LLDB_PATH' gdb: '$GDB_PATH'"
-
- # Run xunit SOS tests
- "$__ProjectRoot/eng/common/build.sh" --test --configuration "$__BuildType" $__TestArgs
- if [ $? != 0 ]; then
- exit 1
- fi
- fi
-fi
-
-echo "BUILD: Repo sucessfully built."
-echo "BUILD: Product binaries are available at $__CMakeBinDir"
diff --git a/eng/build.ps1 b/eng/build.ps1
new file mode 100644
index 0000000000..b3e5407270
--- /dev/null
+++ b/eng/build.ps1
@@ -0,0 +1,74 @@
+[CmdletBinding(PositionalBinding=$false)]
+Param(
+ [string][Alias('c')] $configuration = "Debug",
+ [string] $architecture = "",
+ [string][Alias('v')] $verbosity = "minimal",
+ [switch][Alias('t')] $test,
+ [switch] $ci,
+ [switch] $native,
+ [switch] $dailytest,
+ [Parameter(ValueFromRemainingArguments=$true)][String[]] $remainingargs
+)
+
+Set-StrictMode -Version Latest
+$ErrorActionPreference = "Stop"
+
+function Get-Architecture([string]$arch) {
+ switch ($arch.ToLower()) {
+ { $_ -eq "" } { return Get-Architecture($env:PROCESSOR_ARCHITECTURE) }
+ { ($_ -eq "amd64") -or ($_ -eq "x64") } { return "x64" }
+ { $_ -eq "x86" } { return "x86" }
+ { $_ -eq "arm" } { return "arm" }
+ { $_ -eq "arm64" } { return "arm64" }
+ default { throw "Architecture not supported." }
+ }
+}
+
+$architecture = Get-Architecture($architecture)
+
+$crossbuild = $false
+if (($architecture -eq "arm") -or ($architecture -eq "arm64")) {
+ $processor = Get-Architecture($env:PROCESSOR_ARCHITECTURE)
+ if ($architecture -ne $processor) {
+ $crossbuild = $true
+ }
+}
+
+switch ($configuration.ToLower()) {
+ { $_ -eq "debug" } { $configuration = "Debug" }
+ { $_ -eq "release" } { $configuration = "Release" }
+}
+
+$reporoot = Join-Path $PSScriptRoot ".."
+$engroot = Join-Path $reporoot "eng"
+$artifactsdir = Join-Path $reporoot "artifacts"
+$logdir = Join-Path $artifactsdir "log"
+$logdir = Join-Path $logdir Windows_NT.$architecture.$configuration
+
+if ($ci) {
+ $remainingargs = $remainingargs + "-ci"
+}
+
+# Install sdk for building, restore and build managed components.
+Invoke-Expression "& `"$engroot\common\build.ps1`" -configuration $configuration -verbosity $verbosity /p:TestArchitectures=$architecture $remainingargs"
+if ($lastExitCode -ne 0) {
+ exit $lastExitCode
+}
+
+# Build native components
+if ($native) {
+ Invoke-Expression "& `"$engroot\Build-Native.cmd`" -architecture $architecture -configuration $configuration -verbosity $verbosity $remainingargs"
+ if ($lastExitCode -ne 0) {
+ exit $lastExitCode
+ }
+}
+
+# Run the xunit tests
+if ($test -or $dailytest) {
+ if (-not $crossbuild) {
+ & "$engroot\common\build.ps1" -test -configuration $configuration -ci:$ci -verbosity $verbosity /bl:$logdir\Test.binlog /p:TestArchitectures=$architecture /p:BuildArch=$architecture /p:DailyTest=$dailyTest
+ if ($lastExitCode -ne 0) {
+ exit $lastExitCode
+ }
+ }
+}
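
Two hedged usage sketches for the new eng\build.ps1, assuming it is run from the repository root; -restore and -build are not declared parameters but flow through $remainingargs to eng\common\build.ps1, which understands them:

    # Restore and build the managed and native bits for Release x64.
    & .\eng\build.ps1 -restore -build -native -configuration Release -architecture x64

    # Run only the xunit tests against an existing Debug build.
    & .\eng\build.ps1 -test -configuration Debug -verbosity detailed
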
diff --git a/eng/build.sh b/eng/build.sh
index bb87448d67..f1fe971856 100755
--- a/eng/build.sh
+++ b/eng/build.sh
@@ -2,63 +2,514 @@
# Copyright (c) .NET Foundation and contributors. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for full license information.
+# Obtain the location of the bash script to figure out where the root of the repo is.
source="${BASH_SOURCE[0]}"
-# resolve $SOURCE until the file is no longer a symlink
-while [[ -h $source ]]; do
+# Resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
-
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
+__ProjectRoot="$( cd -P "$( dirname "$source" )/.." && pwd )"
-scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+__BuildOS=Linux
+__HostOS=Linux
+__BuildArch=x64
+__HostArch=x64
+__BuildType=Debug
+__PortableBuild=1
+__ExtraCmakeArgs=""
+__ClangMajorVersion=0
+__ClangMinorVersion=0
+__NumProc=1
+__Build=false
+__NativeBuild=false
+__CrossBuild=false
+__Test=false
+__DailyTest=false
+__CI=false
+__Verbosity=minimal
+__ManagedBuildArgs=
+__TestArgs=
+__UnprocessedBuildArgs=
+
+usage()
+{
+ echo "Usage: $0 [options]"
+ echo "--build - build managed components"
+ echo "--native - build native components"
+ echo "--test - run xunit tests"
+ echo "--dailytest - test components for daily build job"
+ echo "--architecture "
+ echo "--configuration "
+ echo "--rootfs "
+ echo "--stripsymbols - strip symbols into .dbg files"
+ echo "--clangx.y - optional argument to build using clang version x.y"
+ echo "--ci - CI lab build"
+ echo "--verbosity "
+ echo "--help - this help message"
+ exit 1
+}
# args:
# input - $1
-function ToLowerCase() {
+to_lowercase() {
+ #eval $invocation
+
echo "$1" | tr '[:upper:]' '[:lower:]'
return 0
}
-buildargs=
-buildnativeargs=$@
+# Argument types supported by this script:
+#
+# Build architecture - valid values are: x64, x86, arm, armel, arm64
+# Build Type - valid values are: debug, release
+#
+# Set the default arguments for build
+
+# Use uname to determine what the CPU is.
+CPUName=$(uname -p)
+# Some Linux platforms report unknown for platform, but the arch for machine.
+if [ "$CPUName" == "unknown" ]; then
+ CPUName=$(uname -m)
+fi
+
+case $CPUName in
+ i686)
+ echo "Unsupported CPU $CPUName detected, build might not succeed!"
+ __BuildArch=x86
+ __HostArch=x86
+ ;;
+
+ x86_64)
+ __BuildArch=x64
+ __HostArch=x64
+ ;;
+
+ armv7l)
+ echo "Unsupported CPU $CPUName detected, build might not succeed!"
+ __BuildArch=arm
+ __HostArch=arm
+ ;;
+
+ aarch64)
+ __BuildArch=arm64
+ __HostArch=arm64
+ ;;
+
+ *)
+ echo "Unknown CPU $CPUName detected, configuring as if for x64"
+ __BuildArch=x64
+ __HostArch=x64
+ ;;
+esac
+
+# Use uname to determine what the OS is.
+OSName=$(uname -s)
+case $OSName in
+ Linux)
+ __BuildOS=Linux
+ __HostOS=Linux
+ ;;
+
+ Darwin)
+ __BuildOS=OSX
+ __HostOS=OSX
+ ;;
+
+ FreeBSD)
+ __BuildOS=FreeBSD
+ __HostOS=FreeBSD
+ ;;
+
+ OpenBSD)
+ __BuildOS=OpenBSD
+ __HostOS=OpenBSD
+ ;;
+
+ NetBSD)
+ __BuildOS=NetBSD
+ __HostOS=NetBSD
+ ;;
+
+ SunOS)
+ __BuildOS=SunOS
+ __HostOS=SunOS
+ ;;
+
+ *)
+ echo "Unsupported OS $OSName detected, configuring as if for Linux"
+ __BuildOS=Linux
+ __HostOS=Linux
+ ;;
+esac
-# Parse command line options
while :; do
if [ $# -le 0 ]; then
break
fi
- lowerI="$(ToLowerCase "$1")"
- case $lowerI in
- --architecture)
+ # support both "--" and "-" options
+ opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
+ case $opt in
+ -\?|-h|-help)
+ usage
+ exit 1
+ ;;
+
+ -build|-b)
+ __Build=true
+ ;;
+
+ -native)
+ __NativeBuild=true
+ ;;
+
+ -test|-t)
+ __Test=true
+ ;;
+
+ -dailytest)
+ __DailyTest=true
+ ;;
+
+ -ci)
+ __CI=true
+ __ManagedBuildArgs="$__ManagedBuildArgs $1"
+ __TestArgs="$__TestArgs $1"
+ ;;
+
+ -projects)
+ __ManagedBuildArgs="$__ManagedBuildArgs $1 $2"
+ __TestArgs="$__TestArgs $1 $2"
+ shift
+ ;;
+
+ -verbosity)
+ __Verbosity=$2
shift
;;
- --rootfs)
+
+ -configuration|-c)
+ __BuildType="$(to_lowercase "$2")"
shift
;;
- --build-native|--test|--daily-test|--stripsymbols)
+
+ -architecture)
+ __BuildArch="$(to_lowercase "$2")"
+ shift
;;
- --clang3.5|--clang3.6|--clang3.7|--clang3.8|--clang3.9|--clang4.0|--clang5.0)
+
+ -rootfs)
+ export ROOTFS_DIR="$2"
+ shift
+ ;;
+
+ -portablebuild=false)
+ __PortableBuild=0
;;
+
+ -stripsymbols)
+ __ExtraCmakeArgs="$__ExtraCmakeArgs -DSTRIP_SYMBOLS=true"
+ ;;
+
+ -clang3.5)
+ __ClangMajorVersion=3
+ __ClangMinorVersion=5
+ ;;
+
+ -clang3.6)
+ __ClangMajorVersion=3
+ __ClangMinorVersion=6
+ ;;
+
+ -clang3.7)
+ __ClangMajorVersion=3
+ __ClangMinorVersion=7
+ ;;
+
+ -clang3.8)
+ __ClangMajorVersion=3
+ __ClangMinorVersion=8
+ ;;
+
+ -clang3.9)
+ __ClangMajorVersion=3
+ __ClangMinorVersion=9
+ ;;
+
+ -clang4.0)
+ __ClangMajorVersion=4
+ __ClangMinorVersion=0
+ ;;
+
+ -clang5.0)
+ __ClangMajorVersion=5
+ __ClangMinorVersion=0
+ ;;
+
+ -clean|-binarylog|-bl|-pipelineslog|-pl|-restore|-r|-rebuild|-pack|-integrationtest|-performancetest|-sign|-publish|-preparemachine)
+ __ManagedBuildArgs="$__ManagedBuildArgs $1"
+ ;;
+
+ -warnaserror|-nodereuse)
+ __ManagedBuildArgs="$__ManagedBuildArgs $1 $2"
+ ;;
+
*)
- buildargs="$buildargs $1"
+ __UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
;;
esac
shift
done
-# build managed components
-"$scriptroot/common/build.sh" $buildargs
-if [[ $? != 0 ]]; then
- exit 1
+if [ "$__BuildType" == "release" ]; then
+ __BuildType=Release
+fi
+if [ "$__BuildType" == "debug" ]; then
+ __BuildType=Debug
fi
-# build native components and test both
-"$scriptroot/build-native.sh" $buildnativeargs
-if [[ $? != 0 ]]; then
- exit 1
+# Needs to be set for generate version source file/msbuild
+if [[ -z $NUGET_PACKAGES ]]; then
+ if [[ $__CI == true ]]; then
+ export NUGET_PACKAGES="$__ProjectRoot/.packages"
+ else
+ export NUGET_PACKAGES="$HOME/.nuget/packages"
+ fi
fi
+
+echo $NUGET_PACKAGES
+
+__RootBinDir=$__ProjectRoot/artifacts
+__BinDir=$__RootBinDir/bin/$__BuildOS.$__BuildArch.$__BuildType
+__LogDir=$__RootBinDir/log/$__BuildOS.$__BuildArch.$__BuildType
+__IntermediatesDir=$__RootBinDir/obj/$__BuildOS.$__BuildArch.$__BuildType
+__ExtraCmakeArgs="$__ExtraCmakeArgs -DCLR_MANAGED_BINARY_DIR=$__RootBinDir/bin -DCLR_BUILD_TYPE=$__BuildType"
+__DotNetCli=$__ProjectRoot/.dotnet/dotnet
+
+# Specify path to be set for CMAKE_INSTALL_PREFIX.
+# This is where all built native libraries will be copied.
+export __CMakeBinDir="$__BinDir"
+
+# Set default clang version
+if [[ $__ClangMajorVersion == 0 && $__ClangMinorVersion == 0 ]]; then
+ if [[ "$__BuildArch" == "arm" || "$__BuildArch" == "armel" ]]; then
+ __ClangMajorVersion=5
+ __ClangMinorVersion=0
+ else
+ __ClangMajorVersion=3
+ __ClangMinorVersion=9
+ fi
+fi
+
+if [[ "$__BuildArch" == "armel" ]]; then
+ # Armel cross build is Tizen specific and does not support Portable RID build
+ __PortableBuild=0
+fi
+
+# Configure environment if we are doing a cross compile.
+if [ "${__BuildArch}" != "${__HostArch}" ]; then
+ __CrossBuild=true
+ export CROSSCOMPILE=1
+ if ! [[ -n "$ROOTFS_DIR" ]]; then
+ echo "ERROR: ROOTFS_DIR not set for cross build"
+ exit 1
+ fi
+ echo "ROOTFS_DIR: $ROOTFS_DIR"
+fi
+
+mkdir -p "$__IntermediatesDir"
+mkdir -p "$__LogDir"
+mkdir -p "$__CMakeBinDir"
+
+build_native()
+{
+ platformArch="$1"
+ intermediatesForBuild="$2"
+ extraCmakeArguments="$3"
+
+ # All set to commence the build
+ echo "Commencing $__DistroRid build for $__BuildOS.$__BuildArch.$__BuildType in $intermediatesForBuild"
+
+ generator=""
+ buildFile="Makefile"
+ buildTool="make"
+ scriptDir="$__ProjectRoot/eng"
+
+ pushd "$intermediatesForBuild"
+ echo "Invoking \"$scriptDir/gen-buildsys-clang.sh\" \"$__ProjectRoot\" $__ClangMajorVersion \"$__ClangMinorVersion\" $platformArch "$scriptDir" $__BuildType $generator $extraCmakeArguments"
+ "$scriptDir/gen-buildsys-clang.sh" "$__ProjectRoot" $__ClangMajorVersion "$__ClangMinorVersion" $platformArch "$scriptDir" $__BuildType $generator "$extraCmakeArguments"
+ popd
+
+ if [ ! -f "$intermediatesForBuild/$buildFile" ]; then
+ echo "Failed to generate build project!"
+ exit 1
+ fi
+
+ # Check that the makefiles were created.
+ pushd "$intermediatesForBuild"
+
+ echo "Executing $buildTool install -j $__NumProc"
+
+ $buildTool install -j $__NumProc | tee $__LogDir/make.log
+ if [ $? != 0 ]; then
+ echo "Failed to build."
+ exit 1
+ fi
+
+ popd
+}
+
+initTargetDistroRid()
+{
+ source "$__ProjectRoot/eng/init-distro-rid.sh"
+
+ local passedRootfsDir=""
+
+ # Only pass ROOTFS_DIR if cross is specified.
+ if [ $__CrossBuild == true ]; then
+ passedRootfsDir=${ROOTFS_DIR}
+ fi
+
+ initDistroRidGlobal ${__BuildOS} ${__BuildArch} ${__PortableBuild} ${passedRootfsDir}
+}
+
+#
+# Managed build
+#
+
+if [ $__Build == true ]; then
+ echo "Commencing managed build for $__BuildType in $__RootBinDir/bin"
+ "$__ProjectRoot/eng/common/build.sh" --build --configuration "$__BuildType" --verbosity "$__Verbosity" $__ManagedBuildArgs $__UnprocessedBuildArgs
+ if [ $? != 0 ]; then
+ exit 1
+ fi
+fi
+
+#
+# Initialize the target distro name
+#
+
+initTargetDistroRid
+
+echo "RID: $__DistroRid"
+
+#
+# Setup LLDB paths for native build
+#
+
+if [ "$__HostOS" == "OSX" ]; then
+ export LLDB_H=$__ProjectRoot/src/SOS/lldbplugin/swift-4.0
+ export LLDB_LIB=/Applications/Xcode.app/Contents/SharedFrameworks/LLDB.framework/LLDB
+ export LLDB_PATH=/Applications/Xcode.app/Contents/Developer/usr/bin/lldb
+
+ export MACOSX_DEPLOYMENT_TARGET=10.12
+
+ # If Xcode 9.2 exists (like on the CI/build machines), use that. Xcode 9.3 or
+ # greater (swift 4.1 lldb) doesn't work that well (seg faults on exit).
+ if [ -f "/Applications/Xcode_9.2.app/Contents/Developer/usr/bin/lldb" ]; then
+ if [ -f "/Applications/Xcode_9.2.app/Contents/SharedFrameworks/LLDB.framework/LLDB" ]; then
+ export LLDB_PATH=/Applications/Xcode_9.2.app/Contents/Developer/usr/bin/lldb
+ export LLDB_LIB=/Applications/Xcode_9.2.app/Contents/SharedFrameworks/LLDB.framework/LLDB
+ fi
+ fi
+
+ if [ ! -f $LLDB_LIB ]; then
+ echo "Cannot find the lldb library. Try installing Xcode."
+ exit 1
+ fi
+
+ # Workaround bad python version in /usr/local/bin/python2.7 on lab machines
+ export PATH=/usr/bin:$PATH
+ which python
+ python --version
+fi
+
+#
+# Build native components
+#
+
+if [ ! -e $__DotNetCli ]; then
+ echo "dotnet cli not installed $__DotNetCli"
+ exit 1
+fi
+
+if [ $__NativeBuild == true ]; then
+ if [[ $__CI == true ]]; then
+ echo "Generating Version Source File"
+ __GenerateVersionRestoreLog="$__LogDir/GenerateVersionRestore.binlog"
+ $__DotNetCli msbuild $__ProjectRoot/eng/CreateVersionFile.csproj /v:$__Verbosity /bl:$__GenerateVersionRestoreLog /t:Restore /p:Configuration="$__BuildType" /p:Platform="$__BuildArch" $__UnprocessedBuildArgs
+
+ __GenerateVersionLog="$__LogDir/GenerateVersion.binlog"
+ $__DotNetCli msbuild $__ProjectRoot/eng/CreateVersionFile.csproj /v:$__Verbosity /bl:$__GenerateVersionLog /t:GenerateVersionFiles /p:GenerateVersionSourceFile=true /p:NativeVersionSourceFile="$__IntermediatesDir/version.cpp" /p:Configuration="$__BuildType" /p:Platform="$__BuildArch" $__UnprocessedBuildArgs
+ if [ $? != 0 ]; then
+ echo "Generating Version Source File FAILED"
+ exit 1
+ fi
+ else
+ echo "Generating Empty Version Source File"
+ echo "" > "$__IntermediatesDir/version.cpp"
+ fi
+
+ build_native "$__BuildArch" "$__IntermediatesDir" "$__ExtraCmakeArgs"
+fi
+
+#
+# Copy the native SOS binaries to where these tools expect for testing
+#
+
+if [[ $__NativeBuild == true || $__Test == true ]]; then
+ __dotnet_sos=$__RootBinDir/bin/dotnet-sos/$__BuildType/netcoreapp2.1/publish/$__DistroRid
+ __dotnet_dump=$__RootBinDir/bin/dotnet-dump/$__BuildType/netcoreapp2.1/publish/$__DistroRid
+
+ mkdir -p "$__dotnet_sos"
+ mkdir -p "$__dotnet_dump"
+
+ cp "$__BinDir"/* "$__dotnet_sos"
+ echo "Copied SOS to $__dotnet_sos"
+
+ cp "$__BinDir"/* "$__dotnet_dump"
+ echo "Copied SOS to $__dotnet_dump"
+fi
+
+#
+# Run xunit tests
+#
+
+if [ $__Test == true ]; then
+ if [ $__CrossBuild != true ]; then
+ if [ "$LLDB_PATH" == "" ]; then
+ export LLDB_PATH="$(which lldb-3.9.1 2> /dev/null)"
+ if [ "$LLDB_PATH" == "" ]; then
+ export LLDB_PATH="$(which lldb-3.9 2> /dev/null)"
+ if [ "$LLDB_PATH" == "" ]; then
+ export LLDB_PATH="$(which lldb-4.0 2> /dev/null)"
+ if [ "$LLDB_PATH" == "" ]; then
+ export LLDB_PATH="$(which lldb-5.0 2> /dev/null)"
+ if [ "$LLDB_PATH" == "" ]; then
+ export LLDB_PATH="$(which lldb 2> /dev/null)"
+ fi
+ fi
+ fi
+ fi
+ fi
+
+ if [ "$GDB_PATH" == "" ]; then
+ export GDB_PATH="$(which gdb 2> /dev/null)"
+ fi
+
+ echo "lldb: '$LLDB_PATH' gdb: '$GDB_PATH'"
+
+ "$__ProjectRoot/eng/common/build.sh" --test --configuration "$__BuildType" --verbosity "$__Verbosity" /bl:$__LogDir/Test.binlog /p:BuildArch=$__BuildArch /p:DailyTest=$__DailyTest $__TestArgs
+ if [ $? != 0 ]; then
+ exit 1
+ fi
+ fi
+fi
+
+echo "BUILD: Repo sucessfully built."
+echo "BUILD: Product binaries are available at $__CMakeBinDir"
diff --git a/eng/cibuild.sh b/eng/cibuild.sh
index 0ded39d345..63dd473dfa 100755
--- a/eng/cibuild.sh
+++ b/eng/cibuild.sh
@@ -37,7 +37,7 @@ if [ "$__osname" == "Linux" ]; then
fi
fi
-"$scriptroot/build.sh" --restore --build --build-native --test --publish --ci --stripsymbols $@
+"$scriptroot/build.sh" --restore --build --native --test --publish --ci --stripsymbols $@
if [[ $? != 0 ]]; then
exit 1
fi
diff --git a/eng/common/CheckSymbols.ps1 b/eng/common/CheckSymbols.ps1
index b8d84607b8..5442eff386 100644
--- a/eng/common/CheckSymbols.ps1
+++ b/eng/common/CheckSymbols.ps1
@@ -5,11 +5,12 @@ param(
)
Add-Type -AssemblyName System.IO.Compression.FileSystem
+. $PSScriptRoot\pipeline-logging-functions.ps1
function FirstMatchingSymbolDescriptionOrDefault {
param(
[string] $FullPath, # Full path to the module that has to be checked
- [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
+ [string] $TargetServerParameter, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
[string] $SymbolsPath
)
@@ -21,36 +22,36 @@ function FirstMatchingSymbolDescriptionOrDefault {
# checking and which type of file was uploaded.
# The file itself is returned
- $SymbolPath = $SymbolsPath + "\" + $FileName
+ $SymbolPath = $SymbolsPath + '\' + $FileName
# PDB file for the module
- $PdbPath = $SymbolPath.Replace($Extension, ".pdb")
+ $PdbPath = $SymbolPath.Replace($Extension, '.pdb')
# PDB file for R2R module (created by crossgen)
- $NGenPdb = $SymbolPath.Replace($Extension, ".ni.pdb")
+ $NGenPdb = $SymbolPath.Replace($Extension, '.ni.pdb')
# DBG file for a .so library
- $SODbg = $SymbolPath.Replace($Extension, ".so.dbg")
+ $SODbg = $SymbolPath.Replace($Extension, '.so.dbg')
# DWARF file for a .dylib
- $DylibDwarf = $SymbolPath.Replace($Extension, ".dylib.dwarf")
+ $DylibDwarf = $SymbolPath.Replace($Extension, '.dylib.dwarf')
- .\dotnet-symbol.exe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath | Out-Null
+ .\dotnet-symbol.exe --symbols --modules --windows-pdbs $TargetServerParameter $FullPath -o $SymbolsPath | Out-Null
if (Test-Path $PdbPath) {
- return "PDB"
+ return 'PDB'
}
elseif (Test-Path $NGenPdb) {
- return "NGen PDB"
+ return 'NGen PDB'
}
elseif (Test-Path $SODbg) {
- return "DBG for SO"
+ return 'DBG for SO'
}
elseif (Test-Path $DylibDwarf) {
- return "Dwarf for Dylib"
+ return 'Dwarf for Dylib'
}
elseif (Test-Path $SymbolPath) {
- return "Module"
+ return 'Module'
}
else {
return $null
@@ -68,7 +69,7 @@ function CountMissingSymbols {
}
# Extensions for which we'll look for symbols
- $RelevantExtensions = @(".dll", ".exe", ".so", ".dylib")
+ $RelevantExtensions = @('.dll', '.exe', '.so', '.dylib')
# How many files are missing symbol information
$MissingSymbols = 0
@@ -76,7 +77,7 @@ function CountMissingSymbols {
$PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
$PackageGuid = New-Guid
$ExtractPath = Join-Path -Path $ExtractPath -ChildPath $PackageGuid
- $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath "Symbols"
+ $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath 'Symbols'
[System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath)
@@ -86,31 +87,31 @@ function CountMissingSymbols {
Get-ChildItem -Recurse $ExtractPath |
Where-Object {$RelevantExtensions -contains $_.Extension} |
ForEach-Object {
- if ($_.FullName -Match "\\ref\\") {
+ if ($_.FullName -Match '\\ref\\') {
Write-Host "`t Ignoring reference assembly file" $_.FullName
return
}
- $SymbolsOnMSDL = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--microsoft-symbol-server" $SymbolsPath
- $SymbolsOnSymWeb = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--internal-server" $SymbolsPath
+ $SymbolsOnMSDL = FirstMatchingSymbolDescriptionOrDefault -FullPath $_.FullName -TargetServerParameter '--microsoft-symbol-server' -SymbolsPath $SymbolsPath
+ $SymbolsOnSymWeb = FirstMatchingSymbolDescriptionOrDefault -FullPath $_.FullName -TargetServerParameter '--internal-server' -SymbolsPath $SymbolsPath
Write-Host -NoNewLine "`t Checking file" $_.FullName "... "
if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) {
- Write-Host "Symbols found on MSDL (" $SymbolsOnMSDL ") and SymWeb (" $SymbolsOnSymWeb ")"
+ Write-Host "Symbols found on MSDL (${$SymbolsOnMSDL}) and SymWeb (${$SymbolsOnSymWeb})"
}
else {
$MissingSymbols++
if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) {
- Write-Host "No symbols found on MSDL or SymWeb!"
+ Write-Host 'No symbols found on MSDL or SymWeb!'
}
else {
if ($SymbolsOnMSDL -eq $null) {
- Write-Host "No symbols found on MSDL!"
+ Write-Host 'No symbols found on MSDL!'
}
else {
- Write-Host "No symbols found on SymWeb!"
+ Write-Host 'No symbols found on SymWeb!'
}
}
}
@@ -129,26 +130,26 @@ function CheckSymbolsAvailable {
Get-ChildItem "$InputPath\*.nupkg" |
ForEach-Object {
$FileName = $_.Name
-
+
# These packages from Arcade-Services include some native libraries that
# our current symbol uploader can't handle. Below is a workaround until
# we get issue: https://github.com/dotnet/arcade/issues/2457 sorted.
- if ($FileName -Match "Microsoft\.DotNet\.Darc\.") {
+ if ($FileName -Match 'Microsoft\.DotNet\.Darc\.') {
Write-Host "Ignoring Arcade-services file: $FileName"
Write-Host
return
}
- elseif ($FileName -Match "Microsoft\.DotNet\.Maestro\.Tasks\.") {
+ elseif ($FileName -Match 'Microsoft\.DotNet\.Maestro\.Tasks\.') {
Write-Host "Ignoring Arcade-services file: $FileName"
Write-Host
return
}
-
+
Write-Host "Validating $FileName "
$Status = CountMissingSymbols "$InputPath\$FileName"
if ($Status -ne 0) {
- Write-Error "Missing symbols for $Status modules in the package $FileName"
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Missing symbols for $Status modules in the package $FileName"
}
Write-Host
diff --git a/eng/common/PSScriptAnalyzerSettings.psd1 b/eng/common/PSScriptAnalyzerSettings.psd1
new file mode 100644
index 0000000000..4c1ea7c98e
--- /dev/null
+++ b/eng/common/PSScriptAnalyzerSettings.psd1
@@ -0,0 +1,11 @@
+@{
+ IncludeRules=@('PSAvoidUsingCmdletAliases',
+ 'PSAvoidUsingWMICmdlet',
+ 'PSAvoidUsingPositionalParameters',
+ 'PSAvoidUsingInvokeExpression',
+ 'PSUseDeclaredVarsMoreThanAssignments',
+ 'PSUseCmdletCorrectly',
+ 'PSStandardDSCFunctionsInResource',
+ 'PSUseIdenticalMandatoryParametersForDSC',
+ 'PSUseIdenticalParametersForDSC')
+}
\ No newline at end of file
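
The settings file only takes effect when passed to PSScriptAnalyzer. Assuming that module is installed, a typical local run over the engineering scripts might look like this (paths are illustrative):

    # Lint the common engineering scripts with the rule set above.
    Invoke-ScriptAnalyzer -Path .\eng\common -Recurse -Settings .\eng\common\PSScriptAnalyzerSettings.psd1
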
diff --git a/eng/common/PublishToSymbolServers.proj b/eng/common/PublishToSymbolServers.proj
index 5d55e312b0..311e2bbe0f 100644
--- a/eng/common/PublishToSymbolServers.proj
+++ b/eng/common/PublishToSymbolServers.proj
@@ -37,6 +37,8 @@
3650
true
+ true
+ true
false
@@ -56,7 +58,7 @@
DryRun="false"
ConvertPortablePdbsToWindowsPdbs="false"
PdbConversionTreatAsWarning=""
- Condition="$(PublishToSymbolServer)"/>
+ Condition="$(PublishToSymbolServer) and $(PublishToMSDL)"/>
+
+
+ $(WorkItemDirectory)
+ $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"
+ $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"
+ $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults);$(FinalCommand)
+ 4:00
+
+
+
+
+
+ $(WorkItemDirectory)
+ $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument)"
+ $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument)"
+ $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults)
+ 4:00
+
+
+
\ No newline at end of file
diff --git a/eng/common/performance/performance-setup.ps1 b/eng/common/performance/performance-setup.ps1
new file mode 100644
index 0000000000..ec41965fc8
--- /dev/null
+++ b/eng/common/performance/performance-setup.ps1
@@ -0,0 +1,106 @@
+Param(
+ [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY,
+ [string] $CoreRootDirectory,
+ [string] $BaselineCoreRootDirectory,
+ [string] $Architecture="x64",
+ [string] $Framework="netcoreapp5.0",
+ [string] $CompilationMode="Tiered",
+ [string] $Repository=$env:BUILD_REPOSITORY_NAME,
+ [string] $Branch=$env:BUILD_SOURCEBRANCH,
+ [string] $CommitSha=$env:BUILD_SOURCEVERSION,
+ [string] $BuildNumber=$env:BUILD_BUILDNUMBER,
+ [string] $RunCategories="coreclr corefx",
+ [string] $Csproj="src\benchmarks\micro\MicroBenchmarks.csproj",
+ [string] $Kind="micro",
+ [switch] $Internal,
+ [switch] $Compare,
+ [string] $Configurations="CompilationMode=$CompilationMode"
+)
+
+$RunFromPerformanceRepo = ($Repository -eq "dotnet/performance") -or ($Repository -eq "dotnet-performance")
+$UseCoreRun = ($CoreRootDirectory -ne [string]::Empty)
+$UseBaselineCoreRun = ($BaselineCoreRootDirectory -ne [string]::Empty)
+
+$PayloadDirectory = (Join-Path $SourceDirectory "Payload")
+$PerformanceDirectory = (Join-Path $PayloadDirectory "performance")
+$WorkItemDirectory = (Join-Path $SourceDirectory "workitem")
+$ExtraBenchmarkDotNetArguments = "--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true"
+$Creator = $env:BUILD_DEFINITIONNAME
+$PerfLabArguments = ""
+$HelixSourcePrefix = "pr"
+
+$Queue = "Windows.10.Amd64.ClientRS4.DevEx.15.8.Open"
+
+if ($Framework.StartsWith("netcoreapp")) {
+ $Queue = "Windows.10.Amd64.ClientRS5.Open"
+}
+
+if ($Compare) {
+ $Queue = "Windows.10.Amd64.19H1.Tiger.Perf.Open"
+ $PerfLabArguments = ""
+ $ExtraBenchmarkDotNetArguments = ""
+}
+
+if ($Internal) {
+ $Queue = "Windows.10.Amd64.19H1.Tiger.Perf"
+ $PerfLabArguments = "--upload-to-perflab-container"
+ $ExtraBenchmarkDotNetArguments = ""
+ $Creator = ""
+ $HelixSourcePrefix = "official"
+}
+
+$CommonSetupArguments="--frameworks $Framework --queue $Queue --build-number $BuildNumber --build-configs $Configurations"
+$SetupArguments = "--repository https://github.com/$Repository --branch $Branch --get-perf-hash --commit-sha $CommitSha $CommonSetupArguments"
+
+if ($RunFromPerformanceRepo) {
+ $SetupArguments = "--perf-hash $CommitSha $CommonSetupArguments"
+
+ robocopy $SourceDirectory $PerformanceDirectory /E /XD $PayloadDirectory $SourceDirectory\artifacts $SourceDirectory\.git
+}
+else {
+ git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $PerformanceDirectory
+}
+
+if ($UseCoreRun) {
+ $NewCoreRoot = (Join-Path $PayloadDirectory "Core_Root")
+ Move-Item -Path $CoreRootDirectory -Destination $NewCoreRoot
+}
+if ($UseBaselineCoreRun) {
+ $NewBaselineCoreRoot = (Join-Path $PayloadDirectory "Baseline_Core_Root")
+ Move-Item -Path $BaselineCoreRootDirectory -Destination $NewBaselineCoreRoot
+}
+
+$DocsDir = (Join-Path $PerformanceDirectory "docs")
+robocopy $DocsDir $WorkItemDirectory
+
+# Set variables that we will need to have in future steps
+$ci = $true
+
+. "$PSScriptRoot\..\pipeline-logging-functions.ps1"
+
+# Directories
+Write-PipelineSetVariable -Name 'PayloadDirectory' -Value "$PayloadDirectory" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'PerformanceDirectory' -Value "$PerformanceDirectory" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'WorkItemDirectory' -Value "$WorkItemDirectory" -IsMultiJobVariable $false
+
+# Script Arguments
+Write-PipelineSetVariable -Name 'Python' -Value "py -3" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'ExtraBenchmarkDotNetArguments' -Value "$ExtraBenchmarkDotNetArguments" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'SetupArguments' -Value "$SetupArguments" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'PerfLabArguments' -Value "$PerfLabArguments" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'BDNCategories' -Value "$RunCategories" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'TargetCsproj' -Value "$Csproj" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'Kind' -Value "$Kind" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'Architecture' -Value "$Architecture" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'UseCoreRun' -Value "$UseCoreRun" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'UseBaselineCoreRun' -Value "$UseBaselineCoreRun" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'RunFromPerfRepo' -Value "$RunFromPerformanceRepo" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'Compare' -Value "$Compare" -IsMultiJobVariable $false
+
+# Helix Arguments
+Write-PipelineSetVariable -Name 'Creator' -Value "$Creator" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'Queue' -Value "$Queue" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'HelixSourcePrefix' -Value "$HelixSourcePrefix" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name '_BuildConfig' -Value "$Architecture.$Kind.$Framework" -IsMultiJobVariable $false
+
+exit 0
\ No newline at end of file
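
performance-setup.ps1 takes most of its defaults from the Azure Pipelines BUILD_* environment variables, so a local run has to pass the interesting knobs explicitly. A rough sketch, where the Core_Root path is purely hypothetical:

    # Illustrative local invocation; on CI the BUILD_* variables fill in Repository, Branch, etc.
    .\eng\common\performance\performance-setup.ps1 `
        -Architecture x64 `
        -Kind micro `
        -RunCategories "coreclr corefx" `
        -CoreRootDirectory "C:\runtime\artifacts\tests\Core_Root"
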
diff --git a/eng/common/performance/performance-setup.sh b/eng/common/performance/performance-setup.sh
new file mode 100644
index 0000000000..2f2092166e
--- /dev/null
+++ b/eng/common/performance/performance-setup.sh
@@ -0,0 +1,216 @@
+#!/usr/bin/env bash
+
+source_directory=$BUILD_SOURCESDIRECTORY
+core_root_directory=
+baseline_core_root_directory=
+architecture=x64
+framework=netcoreapp5.0
+compilation_mode=tiered
+repository=$BUILD_REPOSITORY_NAME
+branch=$BUILD_SOURCEBRANCH
+commit_sha=$BUILD_SOURCEVERSION
+build_number=$BUILD_BUILDNUMBER
+internal=false
+compare=false
+kind="micro"
+run_categories="coreclr corefx"
+csproj="src\benchmarks\micro\MicroBenchmarks.csproj"
+configurations=
+run_from_perf_repo=false
+use_core_run=true
+use_baseline_core_run=true
+
+while (($# > 0)); do
+ lowerI="$(echo $1 | awk '{print tolower($0)}')"
+ case $lowerI in
+ --sourcedirectory)
+ source_directory=$2
+ shift 2
+ ;;
+ --corerootdirectory)
+ core_root_directory=$2
+ shift 2
+ ;;
+ --baselinecorerootdirectory)
+ baseline_core_root_directory=$2
+ shift 2
+ ;;
+ --architecture)
+ architecture=$2
+ shift 2
+ ;;
+ --framework)
+ framework=$2
+ shift 2
+ ;;
+ --compilationmode)
+ compilation_mode=$2
+ shift 2
+ ;;
+ --repository)
+ repository=$2
+ shift 2
+ ;;
+ --branch)
+ branch=$2
+ shift 2
+ ;;
+ --commitsha)
+ commit_sha=$2
+ shift 2
+ ;;
+ --buildnumber)
+ build_number=$2
+ shift 2
+ ;;
+ --kind)
+ kind=$2
+ shift 2
+ ;;
+ --runcategories)
+ run_categories=$2
+ shift 2
+ ;;
+ --csproj)
+ csproj=$2
+ shift 2
+ ;;
+ --internal)
+ internal=true
+ shift 1
+ ;;
+ --compare)
+ compare=true
+ shift 1
+ ;;
+ --configurations)
+ configurations=$2
+ shift 2
+ ;;
+ --help)
+ echo "Common settings:"
+ echo " --corerootdirectory Directory where Core_Root exists, if running perf testing with --corerun"
+ echo " --architecture Architecture of the testing being run"
+ echo " --configurations List of key=value pairs that will be passed to perf testing infrastructure."
+ echo " ex: --configurations \"CompilationMode=Tiered OptimzationLevel=PGO\""
+ echo " --help Print help and exit"
+ echo ""
+ echo "Advanced settings:"
+ echo " --framework The framework to run, if not running in master"
+ echo " --compliationmode The compilation mode if not passing --configurations"
+ echo " --sourcedirectory The directory of the sources. Defaults to env:BUILD_SOURCESDIRECTORY"
+ echo " --repository The name of the repository in the / format. Defaults to env:BUILD_REPOSITORY_NAME"
+ echo " --branch The name of the branch. Defaults to env:BUILD_SOURCEBRANCH"
+ echo " --commitsha The commit sha1 to run against. Defaults to env:BUILD_SOURCEVERSION"
+ echo " --buildnumber The build number currently running. Defaults to env:BUILD_BUILDNUMBER"
+ echo " --csproj The relative path to the benchmark csproj whose tests should be run. Defaults to src\benchmarks\micro\MicroBenchmarks.csproj"
+ echo " --kind Related to csproj. The kind of benchmarks that should be run. Defaults to micro"
+ echo " --runcategories Related to csproj. Categories of benchmarks to run. Defaults to \"coreclr corefx\""
+ echo " --internal If the benchmarks are running as an official job."
+ echo ""
+ exit 0
+ ;;
+ esac
+done
+
+if [ "$repository" == "dotnet/performance" ] || [ "$repository" == "dotnet-performance" ]; then
+ run_from_perf_repo=true
+fi
+
+if [ -z "$configurations" ]; then
+ configurations="CompliationMode=$compilation_mode"
+fi
+
+if [ -z "$core_root_directory" ]; then
+ use_core_run=false
+fi
+
+if [ -z "$baseline_core_root_directory" ]; then
+ use_baseline_core_run=false
+fi
+
+payload_directory=$source_directory/Payload
+performance_directory=$payload_directory/performance
+workitem_directory=$source_directory/workitem
+extra_benchmark_dotnet_arguments="--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true"
+perflab_arguments=
+queue=Ubuntu.1804.Amd64.Open
+creator=$BUILD_DEFINITIONNAME
+helix_source_prefix="pr"
+
+if [[ "$compare" == true ]]; then
+ extra_benchmark_dotnet_arguments=
+ perflab_arguments=
+
+ # No open queues for arm64
+ if [[ "$architecture" = "arm64" ]]; then
+ echo "Compare not available for arm64"
+ exit 1
+ fi
+
+ queue=Ubuntu.1804.Amd64.Tiger.Perf.Open
+fi
+
+if [[ "$internal" == true ]]; then
+ perflab_arguments="--upload-to-perflab-container"
+ helix_source_prefix="official"
+ creator=
+ extra_benchmark_dotnet_arguments=
+
+ if [[ "$architecture" = "arm64" ]]; then
+ queue=Ubuntu.1804.Arm64.Perf
+ else
+ queue=Ubuntu.1804.Amd64.Tiger.Perf
+ fi
+fi
+
+common_setup_arguments="--frameworks $framework --queue $queue --build-number $build_number --build-configs $configurations"
+setup_arguments="--repository https://github.com/$repository --branch $branch --get-perf-hash --commit-sha $commit_sha $common_setup_arguments"
+
+if [[ "$run_from_perf_repo" = true ]]; then
+ payload_directory=
+ workitem_directory=$source_directory
+ performance_directory=$workitem_directory
+ setup_arguments="--perf-hash $commit_sha $common_setup_arguments"
+else
+ git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $performance_directory
+
+ docs_directory=$performance_directory/docs
+ mv $docs_directory $workitem_directory
+fi
+
+if [[ "$use_core_run" = true ]]; then
+ new_core_root=$payload_directory/Core_Root
+ mv $core_root_directory $new_core_root
+fi
+
+if [[ "$use_baseline_core_run" = true ]]; then
+ new_baseline_core_root=$payload_directory/Baseline_Core_Root
+ mv $baseline_core_root_directory $new_baseline_core_root
+fi
+
+ci=true
+
+_script_dir=$(pwd)/eng/common
+. "$_script_dir/pipeline-logging-functions.sh"
+
+# Make sure all of our variables are available for future steps
+Write-PipelineSetVariable -name "UseCoreRun" -value "$use_core_run" -is_multi_job_variable false
+Write-PipelineSetVariable -name "UseBaselineCoreRun" -value "$use_baseline_core_run" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Architecture" -value "$architecture" -is_multi_job_variable false
+Write-PipelineSetVariable -name "PayloadDirectory" -value "$payload_directory" -is_multi_job_variable false
+Write-PipelineSetVariable -name "PerformanceDirectory" -value "$performance_directory" -is_multi_job_variable false
+Write-PipelineSetVariable -name "WorkItemDirectory" -value "$workitem_directory" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Queue" -value "$queue" -is_multi_job_variable false
+Write-PipelineSetVariable -name "SetupArguments" -value "$setup_arguments" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Python" -value "$python3" -is_multi_job_variable false
+Write-PipelineSetVariable -name "PerfLabArguments" -value "$perflab_arguments" -is_multi_job_variable false
+Write-PipelineSetVariable -name "ExtraBenchmarkDotNetArguments" -value "$extra_benchmark_dotnet_arguments" -is_multi_job_variable false
+Write-PipelineSetVariable -name "BDNCategories" -value "$run_categories" -is_multi_job_variable false
+Write-PipelineSetVariable -name "TargetCsproj" -value "$csproj" -is_multi_job_variable false
+Write-PipelineSetVariable -name "RunFromPerfRepo" -value "$run_from_perf_repo" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Creator" -value "$creator" -is_multi_job_variable false
+Write-PipelineSetVariable -name "HelixSourcePrefix" -value "$helix_source_prefix" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Kind" -value "$kind" -is_multi_job_variable false
+Write-PipelineSetVariable -name "_BuildConfig" -value "$architecture.$kind.$framework" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Compare" -value "$compare" -is_multi_job_variable false
diff --git a/eng/common/LoggingCommandFunctions.ps1 b/eng/common/pipeline-logging-functions.ps1
similarity index 64%
rename from eng/common/LoggingCommandFunctions.ps1
rename to eng/common/pipeline-logging-functions.ps1
index c225eaecbf..a3e1317ad4 100644
--- a/eng/common/LoggingCommandFunctions.ps1
+++ b/eng/common/pipeline-logging-functions.ps1
@@ -1,4 +1,4 @@
-# Source for this file was taken from https://github.com/microsoft/azure-pipelines-task-lib/blob/11c9439d4af17e6475d9fe058e6b2e03914d17e6/powershell/VstsTaskSdk/LoggingCommandFunctions.ps1
+# Source for this file was taken from https://github.com/microsoft/azure-pipelines-task-lib/blob/11c9439d4af17e6475d9fe058e6b2e03914d17e6/powershell/VstsTaskSdk/LoggingCommandFunctions.ps1 and modified.
# NOTE: You should not be calling these method directly as they are likely to change. Instead you should be calling the Write-Pipeline* functions defined in tools.ps1
@@ -12,6 +12,100 @@ $script:loggingCommandEscapeMappings = @( # TODO: WHAT ABOUT "="? WHAT ABOUT "%"
# TODO: BUG: Escape % ???
# TODO: Add test to verify don't need to escape "=".
+# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set
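+# Illustrative usage (message text is an example only): Write-PipelineTelemetryError -Category 'Build' -Message 'Restore failed' -Force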
+function Write-PipelineTelemetryError {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory = $true)]
+ [string]$Category,
+ [Parameter(Mandatory = $true)]
+ [string]$Message,
+ [Parameter(Mandatory = $false)]
+ [string]$Type = 'error',
+ [string]$ErrCode,
+ [string]$SourcePath,
+ [string]$LineNumber,
+ [string]$ColumnNumber,
+ [switch]$AsOutput,
+ [switch]$Force)
+
+ $PSBoundParameters.Remove('Category') | Out-Null
+
+ $Message = "(NETCORE_ENGINEERING_TELEMETRY=$Category) $Message"
+ $PSBoundParameters.Remove('Message') | Out-Null
+ $PSBoundParameters.Add('Message', $Message)
+ Write-PipelineTaskError @PSBoundParameters
+}
+
+# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set
+function Write-PipelineTaskError {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory = $true)]
+ [string]$Message,
+ [Parameter(Mandatory = $false)]
+ [string]$Type = 'error',
+ [string]$ErrCode,
+ [string]$SourcePath,
+ [string]$LineNumber,
+ [string]$ColumnNumber,
+ [switch]$AsOutput,
+ [switch]$Force
+ )
+
+ if(!$Force -And (-Not (Test-Path variable:ci) -Or !$ci)) {
+ if($Type -eq 'error') {
+ Write-Host $Message -ForegroundColor Red
+ return
+ }
+ elseif ($Type -eq 'warning') {
+ Write-Host $Message -ForegroundColor Yellow
+ return
+ }
+ }
+
+ if(($Type -ne 'error') -and ($Type -ne 'warning')) {
+ Write-Host $Message
+ return
+ }
+ $PSBoundParameters.Remove('Force') | Out-Null
+ if(-not $PSBoundParameters.ContainsKey('Type')) {
+ $PSBoundParameters.Add('Type', 'error')
+ }
+ Write-LogIssue @PSBoundParameters
+ }
+
+ function Write-PipelineSetVariable {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory = $true)]
+ [string]$Name,
+ [string]$Value,
+ [switch]$Secret,
+ [switch]$AsOutput,
+ [bool]$IsMultiJobVariable=$true)
+
+ if((Test-Path variable:ci) -And $ci) {
+ Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $Value -Properties @{
+ 'variable' = $Name
+ 'isSecret' = $Secret
+ 'isOutput' = $IsMultiJobVariable
+ } -AsOutput:$AsOutput
+ }
+ }
+
+ function Write-PipelinePrependPath {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory=$true)]
+ [string]$Path,
+ [switch]$AsOutput)
+
+ if((Test-Path variable:ci) -And $ci) {
+ Write-LoggingCommand -Area 'task' -Event 'prependpath' -Data $Path -AsOutput:$AsOutput
+ }
+ }
+
<########################################
# Private functions.
########################################>
@@ -143,4 +237,4 @@ function Write-LogIssue {
}
Write-Host $command -ForegroundColor $foregroundColor -BackgroundColor $backgroundColor
-}
\ No newline at end of file
+}
diff --git a/eng/common/pipeline-logging-functions.sh b/eng/common/pipeline-logging-functions.sh
new file mode 100644
index 0000000000..33c3f0d807
--- /dev/null
+++ b/eng/common/pipeline-logging-functions.sh
@@ -0,0 +1,179 @@
+#!/usr/bin/env bash
+
+function Write-PipelineTelemetryError {
+ local telemetry_category=''
+ local force=false
+ local function_args=()
+ local message=''
+ while [[ $# -gt 0 ]]; do
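+    # Normalize the switch: fold a leading "--" to "-" and lowercase it, so e.g. "--Category" and "-category" select the same case branch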
+ opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
+ case "$opt" in
+ -category|-c)
+ telemetry_category=$2
+ shift
+ ;;
+ -force|-f)
+ force=true
+ ;;
+ -*)
+ function_args+=("$1 $2")
+ shift
+ ;;
+ *)
+ message=$*
+ ;;
+ esac
+ shift
+ done
+
+ if [[ $force != true ]] && [[ "$ci" != true ]]; then
+ echo "$message" >&2
+ return
+ fi
+
+ message="(NETCORE_ENGINEERING_TELEMETRY=$telemetry_category) $message"
+ function_args+=("$message")
+ if [[ $force == true ]]; then
+ function_args+=("-force")
+ fi
+
+ Write-PipelineTaskError $function_args
+}
+
+function Write-PipelineTaskError {
+ if [[ $force != true ]] && [[ "$ci" != true ]]; then
+ echo "$@" >&2
+ return
+ fi
+
+ local message_type="error"
+ local sourcepath=''
+ local linenumber=''
+ local columnnumber=''
+ local error_code=''
+
+ while [[ $# -gt 0 ]]; do
+ opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
+ case "$opt" in
+ -type|-t)
+ message_type=$2
+ shift
+ ;;
+ -sourcepath|-s)
+ sourcepath=$2
+ shift
+ ;;
+ -linenumber|-ln)
+ linenumber=$2
+ shift
+ ;;
+ -columnnumber|-cn)
+ columnnumber=$2
+ shift
+ ;;
+ -errcode|-e)
+ error_code=$2
+ shift
+ ;;
+ *)
+ break
+ ;;
+ esac
+
+ shift
+ done
+
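+  # Assemble an Azure Pipelines logging command, e.g. "##vso[task.logissue type=error;sourcepath=a.cs;linenumber=12]message text"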
+ local message="##vso[task.logissue"
+
+ message="$message type=$message_type"
+
+ if [ -n "$sourcepath" ]; then
+ message="$message;sourcepath=$sourcepath"
+ fi
+
+ if [ -n "$linenumber" ]; then
+ message="$message;linenumber=$linenumber"
+ fi
+
+ if [ -n "$columnnumber" ]; then
+ message="$message;columnnumber=$columnnumber"
+ fi
+
+ if [ -n "$error_code" ]; then
+ message="$message;code=$error_code"
+ fi
+
+ message="$message]$*"
+ echo "$message"
+}
+
+function Write-PipelineSetVariable {
+ if [[ "$ci" != true ]]; then
+ return
+ fi
+
+ local name=''
+ local value=''
+ local secret=false
+ local as_output=false
+ local is_multi_job_variable=true
+
+ while [[ $# -gt 0 ]]; do
+ opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
+ case "$opt" in
+ -name|-n)
+ name=$2
+ shift
+ ;;
+ -value|-v)
+ value=$2
+ shift
+ ;;
+ -secret|-s)
+ secret=true
+ ;;
+ -as_output|-a)
+ as_output=true
+ ;;
+ -is_multi_job_variable|-i)
+ is_multi_job_variable=$2
+ shift
+ ;;
+ esac
+ shift
+ done
+
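+  # Percent-encode characters that would break the ##vso command emitted below (";", "\r", "\n" and "]")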
+ value=${value/;/%3B}
+ value=${value/\\r/%0D}
+ value=${value/\\n/%0A}
+ value=${value/]/%5D}
+
+ local message="##vso[task.setvariable variable=$name;isSecret=$secret;isOutput=$is_multi_job_variable]$value"
+
+ if [[ "$as_output" == true ]]; then
+ $message
+ else
+ echo "$message"
+ fi
+}
+
+function Write-PipelinePrependPath {
+ local prepend_path=''
+
+ while [[ $# -gt 0 ]]; do
+ opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
+ case "$opt" in
+ -path|-p)
+ prepend_path=$2
+ shift
+ ;;
+ esac
+ shift
+ done
+
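+  # Prepend for the current shell; when running in CI, also emit the logging command so later pipeline steps pick up the new PATH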
+ export PATH="$prepend_path:$PATH"
+
+ if [[ "$ci" == true ]]; then
+ echo "##vso[task.prependpath]$prepend_path"
+ fi
+}
\ No newline at end of file
diff --git a/eng/common/post-build/darc-gather-drop.ps1 b/eng/common/post-build/darc-gather-drop.ps1
new file mode 100644
index 0000000000..81ff2a4377
--- /dev/null
+++ b/eng/common/post-build/darc-gather-drop.ps1
@@ -0,0 +1,44 @@
+param(
+  [Parameter(Mandatory=$true)][int] $BarBuildId,                       # ID of the build whose assets should be downloaded
+ [Parameter(Mandatory=$true)][string] $DropLocation, # Where the assets should be downloaded to
+ [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken, # Token used to access Maestro API
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com', # Maestro API URL
+ [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16' # Version of Maestro API to use
+)
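+
+# Illustrative invocation (parameter values are examples only):
+#   .\darc-gather-drop.ps1 -BarBuildId 12345 -DropLocation 'D:\drop' -MaestroApiAccessToken '<token>'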
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ Write-Host 'Installing DARC ...'
+
+ . $PSScriptRoot\..\darc-init.ps1
+ $exitCode = $LASTEXITCODE
+
+ if ($exitCode -ne 0) {
+ Write-PipelineTelemetryError -Category "Darc" -Message "Something failed while running 'darc-init.ps1'. Check for errors above. Exiting now..."
+ ExitWithExitCode $exitCode
+ }
+
+ # For now, only use a dry run.
+ # Ideally we would change darc to enable a quick request that
+  # would check whether the file you want to download exists,
+  # and that it won't conflict with other files.
+ # https://github.com/dotnet/arcade/issues/3674
+  # Right now we can't remove continue-on-error because we occasionally will have
+ # dependencies that have no associated builds (e.g. an old dependency).
+ # We need to add an option to baseline specific dependencies away, or add them manually
+ # to the BAR.
+ darc gather-drop --non-shipping `
+ --dry-run `
+ --continue-on-error `
+ --id $BarBuildId `
+ --output-dir $DropLocation `
+ --bar-uri $MaestroApiEndpoint `
+ --password $MaestroApiAccessToken `
+ --latest-location
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category "Darc" -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/nuget-validation.ps1 b/eng/common/post-build/nuget-validation.ps1
new file mode 100644
index 0000000000..3d6129d72b
--- /dev/null
+++ b/eng/common/post-build/nuget-validation.ps1
@@ -0,0 +1,24 @@
+# This script validates NuGet package metadata information using this
+# tool: https://github.com/NuGet/NuGetGallery/tree/jver-verify/src/VerifyMicrosoftPackage
+
+param(
+ [Parameter(Mandatory=$true)][string] $PackagesPath, # Path to where the packages to be validated are
+ [Parameter(Mandatory=$true)][string] $ToolDestinationPath # Where the validation tool should be downloaded to
+)
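+
+# Illustrative invocation (parameter values are examples only):
+#   .\nuget-validation.ps1 -PackagesPath 'D:\artifacts\packages' -ToolDestinationPath 'D:\tools'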
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ $url = 'https://raw.githubusercontent.com/NuGet/NuGetGallery/jver-verify/src/VerifyMicrosoftPackage/verify.ps1'
+
+ New-Item -ItemType 'directory' -Path ${ToolDestinationPath} -Force
+
+ Invoke-WebRequest $url -OutFile ${ToolDestinationPath}\verify.ps1
+
+ & ${ToolDestinationPath}\verify.ps1 ${PackagesPath}\*.nupkg
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'NuGetValidation' -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/post-build-utils.ps1 b/eng/common/post-build/post-build-utils.ps1
new file mode 100644
index 0000000000..7d49744795
--- /dev/null
+++ b/eng/common/post-build/post-build-utils.ps1
@@ -0,0 +1,91 @@
+# Most of the functions in this file require the variables `MaestroApiEndPoint`,
+# `MaestroApiVersion` and `MaestroApiAccessToken` to be globally available.
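+# For example (the endpoint and version below match the defaults used by the other post-build scripts in this folder):
+#   $MaestroApiEndPoint    = 'https://maestro-prod.westus2.cloudapp.azure.com'
+#   $MaestroApiVersion     = '2019-01-16'
+#   $MaestroApiAccessToken = '<token>'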
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+
+# `tools.ps1` checks $ci to perform some actions. Since the post-build
+# scripts don't necessarily execute on the same agent that ran the
+# build.ps1/sh script, this variable isn't automatically set.
+$ci = $true
+$disableConfigureToolsetImport = $true
+. $PSScriptRoot\..\tools.ps1
+
+function Create-MaestroApiRequestHeaders([string]$ContentType = 'application/json') {
+ Validate-MaestroVars
+
+ $headers = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
+ $headers.Add('Accept', $ContentType)
+ $headers.Add('Authorization',"Bearer $MaestroApiAccessToken")
+ return $headers
+}
+
+function Get-MaestroChannel([int]$ChannelId) {
+ Validate-MaestroVars
+
+ $apiHeaders = Create-MaestroApiRequestHeaders
+ $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}?api-version=$MaestroApiVersion"
+
+ $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+ return $result
+}
+
+function Get-MaestroBuild([int]$BuildId) {
+ Validate-MaestroVars
+
+ $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
+ $apiEndpoint = "$MaestroApiEndPoint/api/builds/${BuildId}?api-version=$MaestroApiVersion"
+
+ $result = try { return Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+ return $result
+}
+
+function Get-MaestroSubscriptions([string]$SourceRepository, [int]$ChannelId) {
+ Validate-MaestroVars
+
+ $SourceRepository = [System.Web.HttpUtility]::UrlEncode($SourceRepository)
+ $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
+ $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions?sourceRepository=$SourceRepository&channelId=$ChannelId&api-version=$MaestroApiVersion"
+
+ $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+ return $result
+}
+
+function Assign-BuildToChannel([int]$BuildId, [int]$ChannelId) {
+ Validate-MaestroVars
+
+ $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
+ $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}/builds/${BuildId}?api-version=$MaestroApiVersion"
+ Invoke-WebRequest -Method Post -Uri $apiEndpoint -Headers $apiHeaders | Out-Null
+}
+
+function Trigger-Subscription([string]$SubscriptionId) {
+ Validate-MaestroVars
+
+ $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
+ $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions/$SubscriptionId/trigger?api-version=$MaestroApiVersion"
+ Invoke-WebRequest -Uri $apiEndpoint -Headers $apiHeaders -Method Post | Out-Null
+}
+
+function Validate-MaestroVars {
+ try {
+ Get-Variable MaestroApiEndPoint -Scope Global | Out-Null
+ Get-Variable MaestroApiVersion -Scope Global | Out-Null
+ Get-Variable MaestroApiAccessToken -Scope Global | Out-Null
+
+ if (!($MaestroApiEndPoint -Match '^http[s]?://maestro-(int|prod).westus2.cloudapp.azure.com$')) {
+ Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiEndPoint is not a valid Maestro URL. '$MaestroApiEndPoint'"
+ ExitWithExitCode 1
+ }
+
+ if (!($MaestroApiVersion -Match '^[0-9]{4}-[0-9]{2}-[0-9]{2}$')) {
+ Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiVersion does not match a version string in the format yyyy-MM-DD. '$MaestroApiVersion'"
+ ExitWithExitCode 1
+ }
+ }
+ catch {
+ Write-PipelineTelemetryError -Category 'MaestroVars' -Message 'Error: Variables `MaestroApiEndPoint`, `MaestroApiVersion` and `MaestroApiAccessToken` are required while using this script.'
+ Write-Host $_
+ ExitWithExitCode 1
+ }
+}
diff --git a/eng/common/post-build/promote-build.ps1 b/eng/common/post-build/promote-build.ps1
new file mode 100644
index 0000000000..ce45635fbd
--- /dev/null
+++ b/eng/common/post-build/promote-build.ps1
@@ -0,0 +1,48 @@
+param(
+ [Parameter(Mandatory=$true)][int] $BuildId,
+ [Parameter(Mandatory=$true)][int] $ChannelId,
+ [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
+ [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
+)
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+  # Check that the channel we are going to promote the build to exists
+ $channelInfo = Get-MaestroChannel -ChannelId $ChannelId
+
+ if (!$channelInfo) {
+    Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "Channel with BAR ID $ChannelId was not found in BAR!"
+ ExitWithExitCode 1
+ }
+
+ # Get info about which channels the build has already been promoted to
+ $buildInfo = Get-MaestroBuild -BuildId $BuildId
+
+ if (!$buildInfo) {
+ Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "Build with BAR ID $BuildId was not found in BAR!"
+ ExitWithExitCode 1
+ }
+
+ # Find whether the build is already assigned to the channel or not
+ if ($buildInfo.channels) {
+ foreach ($channel in $buildInfo.channels) {
+ if ($channel.Id -eq $ChannelId) {
+ Write-Host "The build with BAR ID $BuildId is already on channel $ChannelId!"
+ ExitWithExitCode 0
+ }
+ }
+ }
+
+ Write-Host "Promoting build '$BuildId' to channel '$ChannelId'."
+
+ Assign-BuildToChannel -BuildId $BuildId -ChannelId $ChannelId
+
+ Write-Host 'done.'
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to promote build '$BuildId' to channel '$ChannelId'"
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/sourcelink-validation.ps1 b/eng/common/post-build/sourcelink-validation.ps1
new file mode 100644
index 0000000000..cc9d059d04
--- /dev/null
+++ b/eng/common/post-build/sourcelink-validation.ps1
@@ -0,0 +1,257 @@
+param(
+ [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where Symbols.NuGet packages to be checked are stored
+ [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
+ [Parameter(Mandatory=$false)][string] $GHRepoName, # GitHub name of the repo including the Org. E.g., dotnet/arcade
+ [Parameter(Mandatory=$false)][string] $GHCommit, # GitHub commit SHA used to build the packages
+ [Parameter(Mandatory=$true)][string] $SourcelinkCliVersion # Version of SourceLink CLI to use
+)
+
+. $PSScriptRoot\post-build-utils.ps1
+
+# Cache/HashMap (File -> Exists flag) used to check whether a file exists
+# in the repository at a specific commit point. This is populated by inserting
+# all files present in the repo at a specific commit point.
+$global:RepoFiles = @{}
+
+# Maximum number of jobs to run in parallel
+$MaxParallelJobs = 6
+
+# Wait time between check for system load
+$SecondsBetweenLoadChecks = 10
+
+$ValidatePackage = {
+ param(
+ [string] $PackagePath # Full path to a Symbols.NuGet package
+ )
+
+ . $using:PSScriptRoot\..\tools.ps1
+
+  # Ensure the input file exists
+ if (!(Test-Path $PackagePath)) {
+ Write-Host "Input file does not exist: $PackagePath"
+ return 1
+ }
+
+ # Extensions for which we'll look for SourceLink information
+ # For now we'll only care about Portable & Embedded PDBs
+ $RelevantExtensions = @('.dll', '.exe', '.pdb')
+
+ Write-Host -NoNewLine 'Validating ' ([System.IO.Path]::GetFileName($PackagePath)) '...'
+
+ $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
+ $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
+ $FailedFiles = 0
+
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
+
+ [System.IO.Directory]::CreateDirectory($ExtractPath) | Out-Null
+
+ try {
+ $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
+
+ $zip.Entries |
+ Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
+ ForEach-Object {
+ $FileName = $_.FullName
+ $Extension = [System.IO.Path]::GetExtension($_.Name)
+ $FakeName = -Join((New-Guid), $Extension)
+ $TargetFile = Join-Path -Path $ExtractPath -ChildPath $FakeName
+
+ # We ignore resource DLLs
+ if ($FileName.EndsWith('.resources.dll')) {
+ return
+ }
+
+ [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
+
+ $ValidateFile = {
+ param(
+ [string] $FullPath, # Full path to the module that has to be checked
+ [string] $RealPath,
+ [ref] $FailedFiles
+ )
+
+ $sourcelinkExe = "$env:USERPROFILE\.dotnet\tools"
+ $sourcelinkExe = Resolve-Path "$sourcelinkExe\sourcelink.exe"
+ $SourceLinkInfos = & $sourcelinkExe print-urls $FullPath | Out-String
+
+ if ($LASTEXITCODE -eq 0 -and -not ([string]::IsNullOrEmpty($SourceLinkInfos))) {
+ $NumFailedLinks = 0
+
+ # We only care about Http addresses
+ $Matches = (Select-String '(http[s]?)(:\/\/)([^\s,]+)' -Input $SourceLinkInfos -AllMatches).Matches
+
+ if ($Matches.Count -ne 0) {
+ $Matches.Value |
+ ForEach-Object {
+ $Link = $_
+ $CommitUrl = "https://raw.githubusercontent.com/${using:GHRepoName}/${using:GHCommit}/"
+
+ $FilePath = $Link.Replace($CommitUrl, "")
+ $Status = 200
+ $Cache = $using:RepoFiles
+
+ if ( !($Cache.ContainsKey($FilePath)) ) {
+ try {
+ $Uri = $Link -as [System.URI]
+
+ # Only GitHub links are valid
+ if ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) {
+ $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
+ }
+ else {
+ $Status = 0
+ }
+ }
+ catch {
+ write-host $_
+ $Status = 0
+ }
+ }
+
+ if ($Status -ne 200) {
+ if ($NumFailedLinks -eq 0) {
+ if ($FailedFiles.Value -eq 0) {
+ Write-Host
+ }
+
+ Write-Host "`tFile $RealPath has broken links:"
+ }
+
+ Write-Host "`t`tFailed to retrieve $Link"
+
+ $NumFailedLinks++
+ }
+ }
+ }
+
+ if ($NumFailedLinks -ne 0) {
+ $FailedFiles.value++
+ $global:LASTEXITCODE = 1
+ }
+ }
+ }
+
+ &$ValidateFile $TargetFile $FileName ([ref]$FailedFiles)
+ }
+ }
+ catch {
+
+ }
+ finally {
+ $zip.Dispose()
+ }
+
+ if ($FailedFiles -eq 0) {
+ Write-Host 'Passed.'
+ return 0
+ }
+ else {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "$PackagePath has broken SourceLink links."
+ return 1
+ }
+}
+
+function ValidateSourceLinkLinks {
+ if ($GHRepoName -ne '' -and !($GHRepoName -Match '^[^\s\/]+/[^\s\/]+$')) {
+ if (!($GHRepoName -Match '^[^\s-]+-[^\s]+$')) {
+      Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHRepoName should be in the format <org>/<repo> or <org>-<repo>. '$GHRepoName'"
+ ExitWithExitCode 1
+ }
+ else {
+ $GHRepoName = $GHRepoName -replace '^([^\s-]+)-([^\s]+)$', '$1/$2';
+ }
+ }
+
+ if ($GHCommit -ne '' -and !($GHCommit -Match '^[0-9a-fA-F]{40}$')) {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHCommit should be a 40 chars hexadecimal string. '$GHCommit'"
+ ExitWithExitCode 1
+ }
+
+ if ($GHRepoName -ne '' -and $GHCommit -ne '') {
+ $RepoTreeURL = -Join('http://api.github.com/repos/', $GHRepoName, '/git/trees/', $GHCommit, '?recursive=1')
+ $CodeExtensions = @('.cs', '.vb', '.fs', '.fsi', '.fsx', '.fsscript')
+
+ try {
+ # Retrieve the list of files in the repo at that particular commit point and store them in the RepoFiles hash
+ $Data = Invoke-WebRequest $RepoTreeURL -UseBasicParsing | ConvertFrom-Json | Select-Object -ExpandProperty tree
+
+ foreach ($file in $Data) {
+ $Extension = [System.IO.Path]::GetExtension($file.path)
+
+ if ($CodeExtensions.Contains($Extension)) {
+ $RepoFiles[$file.path] = 1
+ }
+ }
+ }
+ catch {
+ Write-Host "Problems downloading the list of files from the repo. Url used: $RepoTreeURL . Execution will proceed without caching."
+ }
+ }
+ elseif ($GHRepoName -ne '' -or $GHCommit -ne '') {
+    Write-Host 'To use the HTTP caching mechanism, both GHRepoName and GHCommit must be provided.'
+ }
+
+ if (Test-Path $ExtractPath) {
+ Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
+ }
+
+ # Process each NuGet package in parallel
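+  # One background job is started per *.symbols.nupkg, throttled to at most $MaxParallelJobs concurrent validations; completed jobs are drained as we go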
+ Get-ChildItem "$InputPath\*.symbols.nupkg" |
+ ForEach-Object {
+ Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName | Out-Null
+ $NumJobs = @(Get-Job -State 'Running').Count
+
+ while ($NumJobs -ge $MaxParallelJobs) {
+ Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again."
+ sleep $SecondsBetweenLoadChecks
+ $NumJobs = @(Get-Job -State 'Running').Count
+ }
+
+ foreach ($Job in @(Get-Job -State 'Completed')) {
+ Receive-Job -Id $Job.Id
+ Remove-Job -Id $Job.Id
+ }
+ }
+
+ $ValidationFailures = 0
+ foreach ($Job in @(Get-Job)) {
+ $jobResult = Wait-Job -Id $Job.Id | Receive-Job
+ if ($jobResult -ne '0') {
+ $ValidationFailures++
+ }
+ }
+ if ($ValidationFailures -gt 0) {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "$ValidationFailures package(s) failed validation."
+ ExitWithExitCode 1
+ }
+}
+
+function InstallSourcelinkCli {
+ $sourcelinkCliPackageName = 'sourcelink'
+
+ $dotnetRoot = InitializeDotNetCli -install:$true
+ $dotnet = "$dotnetRoot\dotnet.exe"
+ $toolList = & "$dotnet" tool list --global
+
+ if (($toolList -like "*$sourcelinkCliPackageName*") -and ($toolList -like "*$sourcelinkCliVersion*")) {
+ Write-Host "SourceLink CLI version $sourcelinkCliVersion is already installed."
+ }
+ else {
+ Write-Host "Installing SourceLink CLI version $sourcelinkCliVersion..."
+ Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
+ & "$dotnet" tool install $sourcelinkCliPackageName --version $sourcelinkCliVersion --verbosity "minimal" --global
+ }
+}
+
+try {
+ InstallSourcelinkCli
+
+ ValidateSourceLinkLinks
+}
+catch {
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/symbols-validation.ps1 b/eng/common/post-build/symbols-validation.ps1
new file mode 100644
index 0000000000..f7cfe986dd
--- /dev/null
+++ b/eng/common/post-build/symbols-validation.ps1
@@ -0,0 +1,188 @@
+param(
+ [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored
+ [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
+ [Parameter(Mandatory=$true)][string] $DotnetSymbolVersion # Version of dotnet symbol to use
+)
+
+function FirstMatchingSymbolDescriptionOrDefault {
+ param(
+ [string] $FullPath, # Full path to the module that has to be checked
+ [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
+ [string] $SymbolsPath
+ )
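+
+  # $TargetServerParam is expected to be either '--microsoft-symbol-server' or '--internal-server' (see CountMissingSymbols below)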
+
+ $FileName = [System.IO.Path]::GetFileName($FullPath)
+ $Extension = [System.IO.Path]::GetExtension($FullPath)
+
+  # The paths below are potential symbol files that `dotnet symbol` might
+  # return. Which one is returned depends on the type of file we are
+  # checking and which type of file was uploaded.
+
+ # The file itself is returned
+ $SymbolPath = $SymbolsPath + '\' + $FileName
+
+ # PDB file for the module
+ $PdbPath = $SymbolPath.Replace($Extension, '.pdb')
+
+ # PDB file for R2R module (created by crossgen)
+ $NGenPdb = $SymbolPath.Replace($Extension, '.ni.pdb')
+
+ # DBG file for a .so library
+ $SODbg = $SymbolPath.Replace($Extension, '.so.dbg')
+
+ # DWARF file for a .dylib
+ $DylibDwarf = $SymbolPath.Replace($Extension, '.dylib.dwarf')
+
+ $dotnetSymbolExe = "$env:USERPROFILE\.dotnet\tools"
+ $dotnetSymbolExe = Resolve-Path "$dotnetSymbolExe\dotnet-symbol.exe"
+
+ & $dotnetSymbolExe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath | Out-Null
+
+ if (Test-Path $PdbPath) {
+ return 'PDB'
+ }
+ elseif (Test-Path $NGenPdb) {
+ return 'NGen PDB'
+ }
+ elseif (Test-Path $SODbg) {
+ return 'DBG for SO'
+ }
+ elseif (Test-Path $DylibDwarf) {
+ return 'Dwarf for Dylib'
+ }
+ elseif (Test-Path $SymbolPath) {
+ return 'Module'
+ }
+ else {
+ return $null
+ }
+}
+
+function CountMissingSymbols {
+ param(
+ [string] $PackagePath # Path to a NuGet package
+ )
+
+  # Ensure the input file exists
+ if (!(Test-Path $PackagePath)) {
+ Write-PipelineTaskError "Input file does not exist: $PackagePath"
+ ExitWithExitCode 1
+ }
+
+ # Extensions for which we'll look for symbols
+ $RelevantExtensions = @('.dll', '.exe', '.so', '.dylib')
+
+ # How many files are missing symbol information
+ $MissingSymbols = 0
+
+ $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
+ $PackageGuid = New-Guid
+ $ExtractPath = Join-Path -Path $ExtractPath -ChildPath $PackageGuid
+ $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath 'Symbols'
+
+ [System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath)
+
+ Get-ChildItem -Recurse $ExtractPath |
+ Where-Object {$RelevantExtensions -contains $_.Extension} |
+ ForEach-Object {
+ if ($_.FullName -Match '\\ref\\') {
+ Write-Host "`t Ignoring reference assembly file " $_.FullName
+ return
+ }
+
+ $SymbolsOnMSDL = FirstMatchingSymbolDescriptionOrDefault $_.FullName '--microsoft-symbol-server' $SymbolsPath
+ $SymbolsOnSymWeb = FirstMatchingSymbolDescriptionOrDefault $_.FullName '--internal-server' $SymbolsPath
+
+ Write-Host -NoNewLine "`t Checking file " $_.FullName "... "
+
+ if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) {
+ Write-Host "Symbols found on MSDL ($SymbolsOnMSDL) and SymWeb ($SymbolsOnSymWeb)"
+ }
+ else {
+ $MissingSymbols++
+
+ if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) {
+ Write-Host 'No symbols found on MSDL or SymWeb!'
+ }
+ else {
+ if ($SymbolsOnMSDL -eq $null) {
+ Write-Host 'No symbols found on MSDL!'
+ }
+ else {
+ Write-Host 'No symbols found on SymWeb!'
+ }
+ }
+ }
+ }
+
+ Pop-Location
+
+ return $MissingSymbols
+}
+
+function CheckSymbolsAvailable {
+ if (Test-Path $ExtractPath) {
+ Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
+ }
+
+ Get-ChildItem "$InputPath\*.nupkg" |
+ ForEach-Object {
+ $FileName = $_.Name
+
+ # These packages from Arcade-Services include some native libraries that
+ # our current symbol uploader can't handle. Below is a workaround until
+ # we get issue: https://github.com/dotnet/arcade/issues/2457 sorted.
+ if ($FileName -Match 'Microsoft\.DotNet\.Darc\.') {
+ Write-Host "Ignoring Arcade-services file: $FileName"
+ Write-Host
+ return
+ }
+ elseif ($FileName -Match 'Microsoft\.DotNet\.Maestro\.Tasks\.') {
+ Write-Host "Ignoring Arcade-services file: $FileName"
+ Write-Host
+ return
+ }
+
+ Write-Host "Validating $FileName "
+ $Status = CountMissingSymbols "$InputPath\$FileName"
+
+ if ($Status -ne 0) {
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Missing symbols for $Status modules in the package $FileName"
+ ExitWithExitCode $exitCode
+ }
+
+ Write-Host
+ }
+}
+
+function InstallDotnetSymbol {
+ $dotnetSymbolPackageName = 'dotnet-symbol'
+
+ $dotnetRoot = InitializeDotNetCli -install:$true
+ $dotnet = "$dotnetRoot\dotnet.exe"
+ $toolList = & "$dotnet" tool list --global
+
+ if (($toolList -like "*$dotnetSymbolPackageName*") -and ($toolList -like "*$dotnetSymbolVersion*")) {
+ Write-Host "dotnet-symbol version $dotnetSymbolVersion is already installed."
+ }
+ else {
+ Write-Host "Installing dotnet-symbol version $dotnetSymbolVersion..."
+ Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
+ & "$dotnet" tool install $dotnetSymbolPackageName --version $dotnetSymbolVersion --verbosity "minimal" --global
+ }
+}
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
+
+ InstallDotnetSymbol
+
+ CheckSymbolsAvailable
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/trigger-subscriptions.ps1 b/eng/common/post-build/trigger-subscriptions.ps1
new file mode 100644
index 0000000000..55dea518ac
--- /dev/null
+++ b/eng/common/post-build/trigger-subscriptions.ps1
@@ -0,0 +1,64 @@
+param(
+ [Parameter(Mandatory=$true)][string] $SourceRepo,
+ [Parameter(Mandatory=$true)][int] $ChannelId,
+ [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
+ [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
+)
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ # Get all the $SourceRepo subscriptions
+ $normalizedSourceRepo = $SourceRepo.Replace('dnceng@', '')
+ $subscriptions = Get-MaestroSubscriptions -SourceRepository $normalizedSourceRepo -ChannelId $ChannelId
+
+ if (!$subscriptions) {
+ Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message "No subscriptions found for source repo '$normalizedSourceRepo' in channel '$ChannelId'"
+ ExitWithExitCode 0
+ }
+
+ $subscriptionsToTrigger = New-Object System.Collections.Generic.List[string]
+ $failedTriggeredSubscription = $false
+
+ # Get all enabled subscriptions that need dependency flow on 'everyBuild'
+ foreach ($subscription in $subscriptions) {
+ if ($subscription.enabled -and $subscription.policy.updateFrequency -like 'everyBuild' -and $subscription.channel.id -eq $ChannelId) {
+      Write-Host "Should trigger this subscription: $($subscription.id)"
+ [void]$subscriptionsToTrigger.Add($subscription.id)
+ }
+ }
+
+ foreach ($subscriptionToTrigger in $subscriptionsToTrigger) {
+ try {
+ Write-Host "Triggering subscription '$subscriptionToTrigger'."
+
+ Trigger-Subscription -SubscriptionId $subscriptionToTrigger
+
+ Write-Host 'done.'
+ }
+ catch
+ {
+ Write-Host "There was an error while triggering subscription '$subscriptionToTrigger'"
+ Write-Host $_
+ Write-Host $_.ScriptStackTrace
+ $failedTriggeredSubscription = $true
+ }
+ }
+
+ if ($subscriptionsToTrigger.Count -eq 0) {
+ Write-Host "No subscription matched source repo '$normalizedSourceRepo' and channel ID '$ChannelId'."
+ }
+ elseif ($failedTriggeredSubscription) {
+ Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message 'At least one subscription failed to be triggered...'
+ ExitWithExitCode 1
+ }
+ else {
+ Write-Host 'All subscriptions were triggered successfully!'
+ }
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1
index d0eec5163e..3872af59b9 100644
--- a/eng/common/sdk-task.ps1
+++ b/eng/common/sdk-task.ps1
@@ -1,8 +1,8 @@
[CmdletBinding(PositionalBinding=$false)]
Param(
- [string] $configuration = "Debug",
+ [string] $configuration = 'Debug',
[string] $task,
- [string] $verbosity = "minimal",
+ [string] $verbosity = 'minimal',
[string] $msbuildEngine = $null,
[switch] $restore,
[switch] $prepareMachine,
@@ -32,7 +32,7 @@ function Print-Usage() {
}
function Build([string]$target) {
- $logSuffix = if ($target -eq "Execute") { "" } else { ".$target" }
+ $logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" }
$log = Join-Path $LogDir "$task$logSuffix.binlog"
$outputPath = Join-Path $ToolsetDir "$task\\"
@@ -46,33 +46,32 @@ function Build([string]$target) {
}
try {
- if ($help -or (($null -ne $properties) -and ($properties.Contains("/help") -or $properties.Contains("/?")))) {
+ if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) {
Print-Usage
exit 0
}
if ($task -eq "") {
-    Write-Host "Missing required parameter '-task <value>'" -ForegroundColor Red
+    Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task <value>'" -ForegroundColor Red
Print-Usage
ExitWithExitCode 1
}
$taskProject = GetSdkTaskProject $task
if (!(Test-Path $taskProject)) {
- Write-Host "Unknown task: $task" -ForegroundColor Red
+ Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task" -ForegroundColor Red
ExitWithExitCode 1
}
if ($restore) {
- Build "Restore"
+ Build 'Restore'
}
- Build "Execute"
+ Build 'Execute'
}
catch {
- Write-Host $_
- Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Build' -Message $_
ExitWithExitCode 1
}
diff --git a/eng/common/sdl/execute-all-sdl-tools.ps1 b/eng/common/sdl/execute-all-sdl-tools.ps1
index 74080f22d1..9db582f279 100644
--- a/eng/common/sdl/execute-all-sdl-tools.ps1
+++ b/eng/common/sdl/execute-all-sdl-tools.ps1
@@ -1,97 +1,110 @@
Param(
- [string] $GuardianPackageName, # Required: the name of guardian CLI pacakge (not needed if GuardianCliLocation is specified)
- [string] $NugetPackageDirectory, # Required: directory where NuGet packages are installed (not needed if GuardianCliLocation is specified)
- [string] $GuardianCliLocation, # Optional: Direct location of Guardian CLI executable if GuardianPackageName & NugetPackageDirectory are not specified
- [string] $Repository, # Required: the name of the repository (e.g. dotnet/arcade)
- [string] $BranchName="master", # Optional: name of branch or version of gdn settings; defaults to master
- [string] $SourceDirectory, # Required: the directory where source files are located
- [string] $ArtifactsDirectory, # Required: the directory where build artifacts are located
- [string] $DncEngAccessToken, # Required: access token for dnceng; should be provided via KeyVault
- [string[]] $SourceToolsList, # Optional: list of SDL tools to run on source code
- [string[]] $ArtifactToolsList, # Optional: list of SDL tools to run on built artifacts
- [bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaBranchName=$env:BUILD_SOURCEBRANCHNAME, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs.
- [string] $TsaRepositoryName, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs.
- [string] $BuildNumber=$env:BUILD_BUILDNUMBER, # Optional: required for TSA publish; defaults to $(Build.BuildNumber)
- [bool] $UpdateBaseline=$False, # Optional: if true, will update the baseline in the repository; should only be run after fixing any issues which need to be fixed
- [bool] $TsaOnboard=$False, # Optional: if true, will onboard the repository to TSA; should only be run once; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaInstanceUrl, # Optional: only needed if TsaOnboard or TsaPublish is true; the instance-url registered with TSA; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaCodebaseName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the codebase registered with TSA; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaProjectName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the project registered with TSA; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaNotificationEmail, # Optional: only needed if TsaOnboard is true; the email(s) which will receive notifications of TSA bug filings (e.g. alias@microsoft.com); TSA is the automated framework used to upload test results as bugs.
- [string] $TsaCodebaseAdmin, # Optional: only needed if TsaOnboard is true; the aliases which are admins of the TSA codebase (e.g. DOMAIN\alias); TSA is the automated framework used to upload test results as bugs.
- [string] $TsaBugAreaPath, # Optional: only needed if TsaOnboard is true; the area path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaIterationPath, # Optional: only needed if TsaOnboard is true; the iteration path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
- [string] $GuardianLoggerLevel="Standard" # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error
+ [string] $GuardianPackageName, # Required: the name of guardian CLI package (not needed if GuardianCliLocation is specified)
+ [string] $NugetPackageDirectory, # Required: directory where NuGet packages are installed (not needed if GuardianCliLocation is specified)
+ [string] $GuardianCliLocation, # Optional: Direct location of Guardian CLI executable if GuardianPackageName & NugetPackageDirectory are not specified
+ [string] $Repository=$env:BUILD_REPOSITORY_NAME, # Required: the name of the repository (e.g. dotnet/arcade)
+ [string] $BranchName=$env:BUILD_SOURCEBRANCH, # Optional: name of branch or version of gdn settings; defaults to master
+ [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located
+ [string] $ArtifactsDirectory = (Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY ('artifacts')), # Required: the directory where build artifacts are located
+ [string] $AzureDevOpsAccessToken, # Required: access token for dnceng; should be provided via KeyVault
+ [string[]] $SourceToolsList, # Optional: list of SDL tools to run on source code
+ [string[]] $ArtifactToolsList, # Optional: list of SDL tools to run on built artifacts
+ [bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs.
+ [string] $BuildNumber=$env:BUILD_BUILDNUMBER, # Optional: required for TSA publish; defaults to $(Build.BuildNumber)
+ [bool] $UpdateBaseline=$False, # Optional: if true, will update the baseline in the repository; should only be run after fixing any issues which need to be fixed
+ [bool] $TsaOnboard=$False, # Optional: if true, will onboard the repository to TSA; should only be run once; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaInstanceUrl, # Optional: only needed if TsaOnboard or TsaPublish is true; the instance-url registered with TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaCodebaseName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the codebase registered with TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaProjectName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the project registered with TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaNotificationEmail, # Optional: only needed if TsaOnboard is true; the email(s) which will receive notifications of TSA bug filings (e.g. alias@microsoft.com); TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaCodebaseAdmin, # Optional: only needed if TsaOnboard is true; the aliases which are admins of the TSA codebase (e.g. DOMAIN\alias); TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaBugAreaPath, # Optional: only needed if TsaOnboard is true; the area path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaIterationPath, # Optional: only needed if TsaOnboard is true; the iteration path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
+ [string] $GuardianLoggerLevel='Standard', # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error
+ [string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1")
+ [string[]] $PoliCheckAdditionalRunConfigParams # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1")
)
-$ErrorActionPreference = "Stop"
-Set-StrictMode -Version 2.0
-$LASTEXITCODE = 0
+try {
+ $ErrorActionPreference = 'Stop'
+ Set-StrictMode -Version 2.0
+ $disableConfigureToolsetImport = $true
+ $LASTEXITCODE = 0
-#Replace repo names to the format of org/repo
-if (!($Repository.contains('/'))) {
- $RepoName = $Repository -replace '(.*?)-(.*)', '$1/$2';
-}
-else{
- $RepoName = $Repository;
-}
+ . $PSScriptRoot\..\tools.ps1
-if ($GuardianPackageName) {
- $guardianCliLocation = Join-Path $NugetPackageDirectory (Join-Path $GuardianPackageName (Join-Path "tools" "guardian.cmd"))
-} else {
- $guardianCliLocation = $GuardianCliLocation
-}
+ #Replace repo names to the format of org/repo
+ if (!($Repository.contains('/'))) {
+ $RepoName = $Repository -replace '(.*?)-(.*)', '$1/$2';
+ }
+ else{
+ $RepoName = $Repository;
+ }
+
+ if ($GuardianPackageName) {
+ $guardianCliLocation = Join-Path $NugetPackageDirectory (Join-Path $GuardianPackageName (Join-Path 'tools' 'guardian.cmd'))
+ } else {
+ $guardianCliLocation = $GuardianCliLocation
+ }
-$ValidPath = Test-Path $guardianCliLocation
+ $workingDirectory = (Split-Path $SourceDirectory -Parent)
+ $ValidPath = Test-Path $guardianCliLocation
-if ($ValidPath -eq $False)
-{
- Write-Host "Invalid Guardian CLI Location."
- exit 1
-}
+ if ($ValidPath -eq $False)
+ {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Invalid Guardian CLI Location.'
+ ExitWithExitCode 1
+ }
-& $(Join-Path $PSScriptRoot "init-sdl.ps1") -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $ArtifactsDirectory -DncEngAccessToken $DncEngAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
-$gdnFolder = Join-Path $ArtifactsDirectory ".gdn"
+ & $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
+ $gdnFolder = Join-Path $workingDirectory '.gdn'
-if ($TsaOnboard) {
- if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) {
- Write-Host "$guardianCliLocation tsa-onboard --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $ArtifactsDirectory --logger-level $GuardianLoggerLevel"
- & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $ArtifactsDirectory --logger-level $GuardianLoggerLevel
- if ($LASTEXITCODE -ne 0) {
- Write-Host "Guardian tsa-onboard failed with exit code $LASTEXITCODE."
- exit $LASTEXITCODE
+ if ($TsaOnboard) {
+ if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) {
+ Write-Host "$guardianCliLocation tsa-onboard --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
+ & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-onboard failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ } else {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not onboard to TSA -- not all required values ($TsaCodebaseName, $TsaNotificationEmail, $TsaCodebaseAdmin, $TsaBugAreaPath) were specified.'
+ ExitWithExitCode 1
}
- } else {
- Write-Host "Could not onboard to TSA -- not all required values ($$TsaCodebaseName, $$TsaNotificationEmail, $$TsaCodebaseAdmin, $$TsaBugAreaPath) were specified."
- exit 1
}
-}
-if ($ArtifactToolsList -and $ArtifactToolsList.Count -gt 0) {
- & $(Join-Path $PSScriptRoot "run-sdl.ps1") -GuardianCliLocation $guardianCliLocation -WorkingDirectory $ArtifactsDirectory -TargetDirectory $ArtifactsDirectory -GdnFolder $gdnFolder -ToolsList $ArtifactToolsList -DncEngAccessToken $DncEngAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel
-}
-if ($SourceToolsList -and $SourceToolsList.Count -gt 0) {
- & $(Join-Path $PSScriptRoot "run-sdl.ps1") -GuardianCliLocation $guardianCliLocation -WorkingDirectory $ArtifactsDirectory -TargetDirectory $SourceDirectory -GdnFolder $gdnFolder -ToolsList $SourceToolsList -DncEngAccessToken $DncEngAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel
-}
+ if ($ArtifactToolsList -and $ArtifactToolsList.Count -gt 0) {
+ & $(Join-Path $PSScriptRoot 'run-sdl.ps1') -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $ArtifactsDirectory -GdnFolder $gdnFolder -ToolsList $ArtifactToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
+ }
+ if ($SourceToolsList -and $SourceToolsList.Count -gt 0) {
+ & $(Join-Path $PSScriptRoot 'run-sdl.ps1') -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $SourceDirectory -GdnFolder $gdnFolder -ToolsList $SourceToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
+ }
-if ($UpdateBaseline) {
- & (Join-Path $PSScriptRoot "push-gdn.ps1") -Repository $RepoName -BranchName $BranchName -GdnFolder $GdnFolder -DncEngAccessToken $DncEngAccessToken -PushReason "Update baseline"
-}
+ if ($UpdateBaseline) {
+ & (Join-Path $PSScriptRoot 'push-gdn.ps1') -Repository $RepoName -BranchName $BranchName -GdnFolder $GdnFolder -AzureDevOpsAccessToken $AzureDevOpsAccessToken -PushReason 'Update baseline'
+ }
-if ($TsaPublish) {
- if ($TsaBranchName -and $BuildNumber) {
- if (-not $TsaRepositoryName) {
- $TsaRepositoryName = "$($Repository)-$($BranchName)"
- }
- Write-Host "$guardianCliLocation tsa-publish --all-tools --repository-name `"$TsaRepositoryName`" --branch-name `"$TsaBranchName`" --build-number `"$BuildNumber`" --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $SourceDirectory --logger-level $GuardianLoggerLevel"
- & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $ArtifactsDirectory --logger-level $GuardianLoggerLevel
- if ($LASTEXITCODE -ne 0) {
- Write-Host "Guardian tsa-publish failed with exit code $LASTEXITCODE."
- exit $LASTEXITCODE
+ if ($TsaPublish) {
+ if ($TsaBranchName -and $BuildNumber) {
+ if (-not $TsaRepositoryName) {
+ $TsaRepositoryName = "$($Repository)-$($BranchName)"
+ }
+ Write-Host "$guardianCliLocation tsa-publish --all-tools --repository-name `"$TsaRepositoryName`" --branch-name `"$TsaBranchName`" --build-number `"$BuildNumber`" --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
+ & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-publish failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ } else {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not publish to TSA -- not all required values ($TsaBranchName, $BuildNumber) were specified.'
+ ExitWithExitCode 1
}
- } else {
- Write-Host "Could not publish to TSA -- not all required values ($$TsaBranchName, $$BuildNumber) were specified."
- exit 1
}
}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ exit 1
+}
diff --git a/eng/common/sdl/extract-artifact-packages.ps1 b/eng/common/sdl/extract-artifact-packages.ps1
new file mode 100644
index 0000000000..3c9bf10678
--- /dev/null
+++ b/eng/common/sdl/extract-artifact-packages.ps1
@@ -0,0 +1,80 @@
+param(
+ [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where artifact packages are stored
+ [Parameter(Mandatory=$true)][string] $ExtractPath # Full path to directory where the packages will be extracted
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+
+# `tools.ps1` checks $ci to perform some actions. Since the post-build
+# scripts don't necessarily execute on the same agent that ran the
+# build.ps1/sh script, this variable isn't automatically set.
+$ci = $true
+$disableConfigureToolsetImport = $true
+
+function ExtractArtifacts {
+ if (!(Test-Path $InputPath)) {
+ Write-Host "Input Path does not exist: $InputPath"
+ ExitWithExitCode 0
+ }
+ $Jobs = @()
+ Get-ChildItem "$InputPath\*.nupkg" |
+ ForEach-Object {
+ $Jobs += Start-Job -ScriptBlock $ExtractPackage -ArgumentList $_.FullName
+ }
+
+ foreach ($Job in $Jobs) {
+ Wait-Job -Id $Job.Id | Receive-Job
+ }
+}
+
+try {
+ . $PSScriptRoot\..\tools.ps1
+
+ $ExtractPackage = {
+ param(
+ [string] $PackagePath # Full path to a NuGet package
+ )
+
+ if (!(Test-Path $PackagePath)) {
+ Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath"
+ ExitWithExitCode 1
+ }
+
+ $RelevantExtensions = @('.dll', '.exe', '.pdb')
+ Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...'
+
+ $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
+ $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
+
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
+
+ [System.IO.Directory]::CreateDirectory($ExtractPath);
+
+ try {
+ $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
+
+ $zip.Entries |
+ Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
+ ForEach-Object {
+ $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.Name
+
+ [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
+ }
+ }
+ catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+ }
+ finally {
+ $zip.Dispose()
+ }
+ }
+ Measure-Command { ExtractArtifacts }
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
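For reference, a minimal sketch of invoking the new extraction script by hand; the paths are illustrative placeholders mirroring how the execute-sdl.yml template below calls it, not part of this change:

    .\eng\common\sdl\extract-artifact-packages.ps1 `
      -InputPath   "$(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts" `
      -ExtractPath "$(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts"

Each *.nupkg found under -InputPath is opened as a zip archive in a background job, and only entries with .dll/.exe/.pdb extensions are extracted into a per-package folder under -ExtractPath.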
diff --git a/eng/common/sdl/init-sdl.ps1 b/eng/common/sdl/init-sdl.ps1
index cbf5c36a8f..285f1ccdb0 100644
--- a/eng/common/sdl/init-sdl.ps1
+++ b/eng/common/sdl/init-sdl.ps1
@@ -1,28 +1,33 @@
Param(
[string] $GuardianCliLocation,
[string] $Repository,
- [string] $BranchName="master",
+ [string] $BranchName='master',
[string] $WorkingDirectory,
- [string] $DncEngAccessToken,
- [string] $GuardianLoggerLevel="Standard"
+ [string] $AzureDevOpsAccessToken,
+ [string] $GuardianLoggerLevel='Standard'
)
-$ErrorActionPreference = "Stop"
+$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
+$disableConfigureToolsetImport = $true
$LASTEXITCODE = 0
+. $PSScriptRoot\..\tools.ps1
+
+# Don't display the console progress UI - it's a huge perf hit
+$ProgressPreference = 'SilentlyContinue'
+
# Construct basic auth from AzDO access token; construct URI to the repository's gdn folder stored in that repository; construct location of zip file
-$encodedPat = [Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(":$DncEngAccessToken"))
+$encodedPat = [Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(":$AzureDevOpsAccessToken"))
$escapedRepository = [Uri]::EscapeDataString("/$Repository/$BranchName/.gdn")
$uri = "https://dev.azure.com/dnceng/internal/_apis/git/repositories/sdl-tool-cfg/Items?path=$escapedRepository&versionDescriptor[versionOptions]=0&`$format=zip&api-version=5.0-preview.1"
$zipFile = "$WorkingDirectory/gdn.zip"
Add-Type -AssemblyName System.IO.Compression.FileSystem
-$gdnFolder = (Join-Path $WorkingDirectory ".gdn")
-Try
-{
+$gdnFolder = (Join-Path $WorkingDirectory '.gdn')
+try {
# We try to download the zip; if the request fails (e.g. the file doesn't exist), we catch it and init guardian instead
- Write-Host "Downloading gdn folder from internal config repostiory..."
+ Write-Host 'Downloading gdn folder from internal config repository...'
Invoke-WebRequest -Headers @{ "Accept"="application/zip"; "Authorization"="Basic $encodedPat" } -Uri $uri -OutFile $zipFile
if (Test-Path $gdnFolder) {
# Remove the gdn folder if it exists (it shouldn't unless there's too much caching; this is just in case)
@@ -30,19 +35,29 @@ Try
}
[System.IO.Compression.ZipFile]::ExtractToDirectory($zipFile, $WorkingDirectory)
Write-Host $gdnFolder
-} Catch [System.Net.WebException] {
+ ExitWithExitCode 0
+} catch [System.Net.WebException] { } # Catch and ignore webexception
+try {
# if the folder does not exist, we'll do a guardian init and push it to the remote repository
- Write-Host "Initializing Guardian..."
+ Write-Host 'Initializing Guardian...'
Write-Host "$GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel"
& $GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel
if ($LASTEXITCODE -ne 0) {
- Write-Error "Guardian init failed with exit code $LASTEXITCODE."
+ Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian init failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
}
# We create the mainbaseline so it can be edited later
Write-Host "$GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline"
& $GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline
if ($LASTEXITCODE -ne 0) {
- Write-Error "Guardian baseline failed with exit code $LASTEXITCODE."
+ Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian baseline failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
}
- & $(Join-Path $PSScriptRoot "push-gdn.ps1") -Repository $Repository -BranchName $BranchName -GdnFolder $gdnFolder -DncEngAccessToken $DncEngAccessToken -PushReason "Initialize gdn folder"
-}
\ No newline at end of file
+ & $(Join-Path $PSScriptRoot 'push-gdn.ps1') -Repository $Repository -BranchName $BranchName -GdnFolder $gdnFolder -AzureDevOpsAccessToken $AzureDevOpsAccessToken -PushReason 'Initialize gdn folder'
+ ExitWithExitCode 0
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
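For illustration, the updated script might be invoked roughly as follows; every value here, including the Guardian CLI path, is a placeholder rather than something taken from this change:

    .\eng\common\sdl\init-sdl.ps1 `
      -GuardianCliLocation $guardianCliLocation `
      -Repository '<org>/<repo>' `
      -BranchName 'master' `
      -WorkingDirectory "$(Build.ArtifactStagingDirectory)\artifacts" `
      -AzureDevOpsAccessToken $token

If the repository's .gdn folder can be downloaded from the sdl-tool-cfg config repository, it is reused and the script exits; otherwise the script falls through to `guardian init`, creates the mainbaseline, and pushes the new folder via push-gdn.ps1.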
diff --git a/eng/common/sdl/packages.config b/eng/common/sdl/packages.config
index b054737df1..256ffbfb93 100644
--- a/eng/common/sdl/packages.config
+++ b/eng/common/sdl/packages.config
@@ -1,4 +1,4 @@
-
+
-
+
diff --git a/eng/common/sdl/push-gdn.ps1 b/eng/common/sdl/push-gdn.ps1
index cacaf8e912..79d3d355c7 100644
--- a/eng/common/sdl/push-gdn.ps1
+++ b/eng/common/sdl/push-gdn.ps1
@@ -1,51 +1,65 @@
Param(
[string] $Repository,
- [string] $BranchName="master",
+ [string] $BranchName='master',
[string] $GdnFolder,
- [string] $DncEngAccessToken,
+ [string] $AzureDevOpsAccessToken,
[string] $PushReason
)
-$ErrorActionPreference = "Stop"
+$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
+$disableConfigureToolsetImport = $true
$LASTEXITCODE = 0
-# We create the temp directory where we'll store the sdl-config repository
-$sdlDir = Join-Path $env:TEMP "sdl"
-if (Test-Path $sdlDir) {
- Remove-Item -Force -Recurse $sdlDir
-}
+try {
+ . $PSScriptRoot\..\tools.ps1
-Write-Host "git clone https://dnceng:`$DncEngAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir"
-git clone https://dnceng:$DncEngAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir
-if ($LASTEXITCODE -ne 0) {
- Write-Error "Git clone failed with exit code $LASTEXITCODE."
-}
-# We copy the .gdn folder from our local run into the git repository so it can be committed
-$sdlRepositoryFolder = Join-Path (Join-Path (Join-Path $sdlDir $Repository) $BranchName) ".gdn"
-if (Get-Command Robocopy) {
- Robocopy /S $GdnFolder $sdlRepositoryFolder
-} else {
- rsync -r $GdnFolder $sdlRepositoryFolder
-}
-# cd to the sdl-config directory so we can run git there
-Push-Location $sdlDir
-# git add . --> git commit --> git push
-Write-Host "git add ."
-git add .
-if ($LASTEXITCODE -ne 0) {
- Write-Error "Git add failed with exit code $LASTEXITCODE."
-}
-Write-Host "git -c user.email=`"dn-bot@microsoft.com`" -c user.name=`"Dotnet Bot`" commit -m `"$PushReason for $Repository/$BranchName`""
-git -c user.email="dn-bot@microsoft.com" -c user.name="Dotnet Bot" commit -m "$PushReason for $Repository/$BranchName"
-if ($LASTEXITCODE -ne 0) {
- Write-Error "Git commit failed with exit code $LASTEXITCODE."
-}
-Write-Host "git push"
-git push
-if ($LASTEXITCODE -ne 0) {
- Write-Error "Git push failed with exit code $LASTEXITCODE."
-}
+ # We create the temp directory where we'll store the sdl-config repository
+ $sdlDir = Join-Path $env:TEMP 'sdl'
+ if (Test-Path $sdlDir) {
+ Remove-Item -Force -Recurse $sdlDir
+ }
-# Return to the original directory
-Pop-Location
\ No newline at end of file
+ Write-Host "git clone https://dnceng:`$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir"
+ git clone https://dnceng:$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git clone failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ # We copy the .gdn folder from our local run into the git repository so it can be committed
+ $sdlRepositoryFolder = Join-Path (Join-Path (Join-Path $sdlDir $Repository) $BranchName) '.gdn'
+ if (Get-Command Robocopy) {
+ Robocopy /S $GdnFolder $sdlRepositoryFolder
+ } else {
+ rsync -r $GdnFolder $sdlRepositoryFolder
+ }
+ # cd to the sdl-config directory so we can run git there
+ Push-Location $sdlDir
+ # git add . --> git commit --> git push
+ Write-Host 'git add .'
+ git add .
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git add failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ Write-Host "git -c user.email=`"dn-bot@microsoft.com`" -c user.name=`"Dotnet Bot`" commit -m `"$PushReason for $Repository/$BranchName`""
+ git -c user.email="dn-bot@microsoft.com" -c user.name="Dotnet Bot" commit -m "$PushReason for $Repository/$BranchName"
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git commit failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ Write-Host 'git push'
+ git push
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git push failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+
+ # Return to the original directory
+ Pop-Location
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
\ No newline at end of file
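For illustration only (values are placeholders), this script is called from init-sdl.ps1 roughly like this:

    .\eng\common\sdl\push-gdn.ps1 `
      -Repository '<org>/<repo>' `
      -BranchName 'master' `
      -GdnFolder "$WorkingDirectory\.gdn" `
      -AzureDevOpsAccessToken $token `
      -PushReason 'Initialize gdn folder'

It clones the sdl-tool-cfg repository into %TEMP%\sdl, copies the local .gdn folder to <repo>\<branch>\.gdn inside that clone, and commits and pushes the result as Dotnet Bot; with this rewrite, every failing git step now reports through Write-PipelineTelemetryError and exits via ExitWithExitCode.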
diff --git a/eng/common/sdl/run-sdl.ps1 b/eng/common/sdl/run-sdl.ps1
index e6a86d03a2..40a084f796 100644
--- a/eng/common/sdl/run-sdl.ps1
+++ b/eng/common/sdl/run-sdl.ps1
@@ -5,61 +5,65 @@ Param(
[string] $GdnFolder,
[string[]] $ToolsList,
[string] $UpdateBaseline,
- [string] $GuardianLoggerLevel="Standard"
+ [string] $GuardianLoggerLevel='Standard',
+ [string[]] $CrScanAdditionalRunConfigParams,
+ [string[]] $PoliCheckAdditionalRunConfigParams
)
-$ErrorActionPreference = "Stop"
+$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
+$disableConfigureToolsetImport = $true
$LASTEXITCODE = 0
-# We store config files in the r directory of .gdn
-Write-Host $ToolsList
-$gdnConfigPath = Join-Path $GdnFolder "r"
-$ValidPath = Test-Path $GuardianCliLocation
+try {
+ . $PSScriptRoot\..\tools.ps1
-if ($ValidPath -eq $False)
-{
- Write-Host "Invalid Guardian CLI Location."
- exit 1
-}
+ # We store config files in the r directory of .gdn
+ Write-Host $ToolsList
+ $gdnConfigPath = Join-Path $GdnFolder 'r'
+ $ValidPath = Test-Path $GuardianCliLocation
-foreach ($tool in $ToolsList) {
- $gdnConfigFile = Join-Path $gdnConfigPath "$tool-configure.gdnconfig"
- $config = $False
- Write-Host $tool
- # We have to manually configure tools that run on source to look at the source directory only
- if ($tool -eq "credscan") {
- Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" TargetDirectory : $TargetDirectory `""
- & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " TargetDirectory : $TargetDirectory "
- if ($LASTEXITCODE -ne 0) {
- Write-Host "Guardian configure for $tool failed with exit code $LASTEXITCODE."
- exit $LASTEXITCODE
- }
- $config = $True
- }
- if ($tool -eq "policheck") {
- Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" Target : $TargetDirectory `""
- & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " Target : $TargetDirectory "
- if ($LASTEXITCODE -ne 0) {
- Write-Host "Guardian configure for $tool failed with exit code $LASTEXITCODE."
- exit $LASTEXITCODE
- }
- $config = $True
+ if ($ValidPath -eq $False)
+ {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
+ ExitWithExitCode 1
}
- Write-Host "$GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel --config $gdnConfigFile $config"
- if ($config) {
- & $GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel --config $gdnConfigFile
- if ($LASTEXITCODE -ne 0) {
- Write-Host "Guardian run for $tool using $gdnConfigFile failed with exit code $LASTEXITCODE."
- exit $LASTEXITCODE
+ $configParam = @('--config')
+
+ foreach ($tool in $ToolsList) {
+ $gdnConfigFile = Join-Path $gdnConfigPath "$tool-configure.gdnconfig"
+ Write-Host $tool
+ # We have to manually configure tools that run on source to look at the source directory only
+ if ($tool -eq 'credscan') {
+ Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" TargetDirectory < $TargetDirectory `" `" OutputType < pre `" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})"
+ & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " TargetDirectory < $TargetDirectory " "OutputType < pre" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
}
- } else {
- & $GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel
- if ($LASTEXITCODE -ne 0) {
- Write-Host "Guardian run for $tool failed with exit code $LASTEXITCODE."
- exit $LASTEXITCODE
+ if ($tool -eq 'policheck') {
+ Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" Target < $TargetDirectory `" $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})"
+ & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " Target < $TargetDirectory " $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
}
+
+ $configParam+=$gdnConfigFile
}
-}
+ Write-Host "$GuardianCliLocation run --working-directory $WorkingDirectory --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam"
+ & $GuardianCliLocation run --working-directory $WorkingDirectory --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian run for $ToolsList using $configParam failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
\ No newline at end of file
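A hedged sketch of the new invocation shape; the leading parameter names are inferred from the variables used in the script body, the tool list and variable values are illustrative, and the SuppressionsPath setting is a hypothetical example of an extra run-config parameter:

    .\eng\common\sdl\run-sdl.ps1 `
      -GuardianCliLocation $guardianCliLocation `
      -WorkingDirectory $workingDirectory `
      -TargetDirectory $targetDirectory `
      -GdnFolder "$workingDirectory\.gdn" `
      -ToolsList @('credscan', 'policheck') `
      -UpdateBaseline 'false' `
      -GuardianLoggerLevel 'Standard' `
      -CrScanAdditionalRunConfigParams @('SuppressionsPath < suppressions.json')

The key behavioral change is that `guardian configure` still runs once per tool, but the generated .gdnconfig files are accumulated into $configParam so that a single `guardian run --config <file1> <file2> ...` executes all configured tools in one pass instead of one run per tool.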
diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml
new file mode 100644
index 0000000000..2973bcaf3a
--- /dev/null
+++ b/eng/common/templates/job/execute-sdl.yml
@@ -0,0 +1,73 @@
+parameters:
+ overrideParameters: '' # Optional: to override values for parameters.
+ additionalParameters: '' # Optional: parameters that need user-specific values, e.g. '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
+ # There is a reported bug in Azure DevOps: if this parameter is named
+ # 'continueOnError', its value is not picked up correctly.
+ # The caller (post-build.yml) can also work around this by not using a nested parameter.
+ sdlContinueOnError: false # optional: determines whether to continue the build if the step errors
+ dependsOn: '' # Optional: dependencies of the job
+ artifactNames: '' # Optional: patterns supplied to DownloadBuildArtifacts
+ # Usage:
+ # artifactNames:
+ # - 'BlobArtifacts'
+ # - 'Artifacts_Windows_NT_Release'
+
+jobs:
+- job: Run_SDL
+ dependsOn: ${{ parameters.dependsOn }}
+ displayName: Run SDL tool
+ variables:
+ - group: DotNet-VSTS-Bot
+ pool:
+ name: Hosted VS2017
+ steps:
+ - checkout: self
+ clean: true
+ - ${{ if ne(parameters.artifactNames, '') }}:
+ - ${{ each artifactName in parameters.artifactNames }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Build Artifacts
+ inputs:
+ buildType: current
+ artifactName: ${{ artifactName }}
+ downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
+ - ${{ if eq(parameters.artifactNames, '') }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Build Artifacts
+ inputs:
+ buildType: current
+ downloadType: specific files
+ itemPattern: "**"
+ downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
+ - powershell: eng/common/sdl/extract-artifact-packages.ps1
+ -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
+ -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
+ displayName: Extract Blob Artifacts
+ continueOnError: ${{ parameters.sdlContinueOnError }}
+ - powershell: eng/common/sdl/extract-artifact-packages.ps1
+ -InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
+ -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
+ displayName: Extract Package Artifacts
+ continueOnError: ${{ parameters.sdlContinueOnError }}
+ - task: NuGetToolInstaller@1
+ displayName: 'Install NuGet.exe'
+ - task: NuGetCommand@2
+ displayName: 'Install Guardian'
+ inputs:
+ restoreSolution: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
+ feedsToUse: config
+ nugetConfigPath: $(Build.SourcesDirectory)\eng\common\sdl\NuGet.config
+ externalFeedCredentials: GuardianConnect
+ restoreDirectory: $(Build.SourcesDirectory)\.packages
+ - ${{ if ne(parameters.overrideParameters, '') }}:
+ - powershell: eng/common/sdl/execute-all-sdl-tools.ps1 ${{ parameters.overrideParameters }}
+ displayName: Execute SDL
+ continueOnError: ${{ parameters.sdlContinueOnError }}
+ - ${{ if eq(parameters.overrideParameters, '') }}:
+ - powershell: eng/common/sdl/execute-all-sdl-tools.ps1
+ -GuardianPackageName Microsoft.Guardian.Cli.0.7.2
+ -NugetPackageDirectory $(Build.SourcesDirectory)\.packages
+ -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
+ ${{ parameters.additionalParameters }}
+ displayName: Execute SDL
+ continueOnError: ${{ parameters.sdlContinueOnError }}
diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
index 1814e0ab61..ecebd0f03e 100644
--- a/eng/common/templates/job/job.yml
+++ b/eng/common/templates/job/job.yml
@@ -1,64 +1,33 @@
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
parameters:
# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
cancelTimeoutInMinutes: ''
-
condition: ''
-
- continueOnError: false
-
container: ''
-
+ continueOnError: false
dependsOn: ''
-
displayName: ''
-
- steps: []
-
pool: ''
-
+ steps: []
strategy: ''
-
timeoutInMinutes: ''
-
variables: []
-
workspace: ''
# Job base template specific parameters
- # Optional: Enable installing Microbuild plugin
- # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
- # _TeamName - the name of your team
- # _SignType - 'test' or 'real'
+ # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
+ artifacts: ''
enableMicrobuild: false
-
- # Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
-
- # Optional: Enable publishing to the build asset registry
enablePublishBuildAssets: false
-
- # Optional: Include PublishTestResults task
enablePublishTestResults: false
-
- # Optional: enable sending telemetry
- enableTelemetry: false
-
- # Optional: define the helix repo for telemetry (example: 'dotnet/arcade')
- helixRepo: ''
-
- # Optional: define the helix type for telemetry (example: 'build/product/')
- helixType: ''
-
- # Required: name of the job
+ enablePublishUsingPipelines: false
name: ''
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ preSteps: []
runAsPublic: false
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
jobs:
- job: ${{ parameters.name }}
@@ -90,7 +59,7 @@ jobs:
timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
variables:
- - ${{ if eq(parameters.enableTelemetry, 'true') }}:
+ - ${{ if ne(parameters.enableTelemetry, 'false') }}:
- name: DOTNET_CLI_TELEMETRY_PROFILE
value: '$(Build.Repository.Uri)'
- ${{ each variable in parameters.variables }}:
@@ -122,21 +91,12 @@ jobs:
workspace: ${{ parameters.workspace }}
steps:
- - ${{ if eq(parameters.enableTelemetry, 'true') }}:
- # Telemetry tasks are built from https://github.com/dotnet/arcade-extensions
- - task: sendStartTelemetry@0
- displayName: 'Send Helix Start Telemetry'
- inputs:
- helixRepo: ${{ parameters.helixRepo }}
- ${{ if ne(parameters.helixType, '') }}:
- helixType: ${{ parameters.helixType }}
- buildConfig: $(_BuildConfig)
- runAsPublic: ${{ parameters.runAsPublic }}
- continueOnError: ${{ parameters.continueOnError }}
- condition: always()
+ - ${{ if ne(parameters.preSteps, '') }}:
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- task: MicroBuildSigningPlugin@2
displayName: Install MicroBuild plugin
inputs:
@@ -148,6 +108,16 @@ jobs:
continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+ - task: NuGetAuthenticate@0
+
+ - ${{ if or(eq(parameters.artifacts.download, 'true'), ne(parameters.artifacts.download, '')) }}:
+ - task: DownloadPipelineArtifact@2
+ inputs:
+ buildType: current
+ artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
+ targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
+ itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
+
- ${{ each step in parameters.steps }}:
- ${{ step }}
@@ -160,20 +130,60 @@ jobs:
env:
TeamName: $(_TeamName)
- - ${{ if eq(parameters.enableTelemetry, 'true') }}:
- # Telemetry tasks are built from https://github.com/dotnet/arcade-extensions
- - task: sendEndTelemetry@0
- displayName: 'Send Helix End Telemetry'
- continueOnError: ${{ parameters.continueOnError }}
- condition: always()
-
- - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if or(eq(parameters.artifacts.publish.artifacts, 'true'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - task: CopyFiles@2
+ displayName: Gather binaries for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/bin'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
+ - task: CopyFiles@2
+ displayName: Gather packages for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/packages'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
+ - task: PublishBuildArtifacts@1
+ displayName: Publish pipeline artifacts
+ inputs:
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
+ PublishLocation: Container
+ ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
+ continueOnError: true
+ condition: always()
+ - ${{ if or(eq(parameters.artifacts.publish.logs, 'true'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - publish: artifacts/log
+ artifact: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
+ displayName: Publish logs
+ continueOnError: true
+ condition: always()
+ - ${{ if or(eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
+ - ${{ if and(ne(parameters.enablePublishUsingPipelines, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: CopyFiles@2
+ displayName: Gather Asset Manifests
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/AssetManifests'
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
+
+ - task: PublishBuildArtifacts@1
+ displayName: Push Asset Manifests
+ inputs:
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/AssetManifests'
+ PublishLocation: Container
+ ArtifactName: AssetManifests
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
+
+ - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
- task: PublishBuildArtifacts@1
displayName: Publish Logs
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
PublishLocation: Container
- ArtifactName: $(Agent.Os)_$(Agent.JobName)
+ ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
continueOnError: true
condition: always()
@@ -187,7 +197,7 @@ jobs:
continueOnError: true
condition: always()
- - ${{ if and(eq(parameters.enablePublishBuildAssets, true), ne(variables['_PublishUsingPipelines'], 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if and(eq(parameters.enablePublishBuildAssets, true), ne(parameters.enablePublishUsingPipelines, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: CopyFiles@2
displayName: Gather Asset Manifests
inputs:
@@ -195,6 +205,7 @@ jobs:
TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
+
- task: PublishBuildArtifacts@1
displayName: Push Asset Manifests
inputs:
diff --git a/eng/common/templates/job/performance.yml b/eng/common/templates/job/performance.yml
new file mode 100644
index 0000000000..f877fd7a89
--- /dev/null
+++ b/eng/common/templates/job/performance.yml
@@ -0,0 +1,95 @@
+parameters:
+ steps: [] # optional -- any additional steps that need to happen before pulling down the performance repo and sending the performance benchmarks to Helix (i.e. building your repo)
+ variables: [] # optional -- list of additional variables to send to the template
+ jobName: '' # required -- job name
+ displayName: '' # optional -- display name for the job. Will use jobName if not passed
+ pool: '' # required -- name of the Build pool
+ container: '' # required -- name of the container
+ osGroup: '' # required -- operating system for the job
+ extraSetupParameters: '' # optional -- extra arguments to pass to the setup script
+ frameworks: ['netcoreapp3.0'] # optional -- list of frameworks to run against
+ continueOnError: 'false' # optional -- determines whether to continue the build if the step errors
+ dependsOn: '' # optional -- dependencies of the job
+ timeoutInMinutes: 320 # optional -- timeout for the job
+ enableTelemetry: false # optional -- enable telemetry
+
+jobs:
+- template: ../jobs/jobs.yml
+ parameters:
+ dependsOn: ${{ parameters.dependsOn }}
+ enableTelemetry: ${{ parameters.enableTelemetry }}
+ enablePublishBuildArtifacts: true
+ continueOnError: ${{ parameters.continueOnError }}
+
+ jobs:
+ - job: '${{ parameters.jobName }}'
+
+ ${{ if ne(parameters.displayName, '') }}:
+ displayName: '${{ parameters.displayName }}'
+ ${{ if eq(parameters.displayName, '') }}:
+ displayName: '${{ parameters.jobName }}'
+
+ timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+
+ variables:
+
+ - ${{ each variable in parameters.variables }}:
+ - ${{ if ne(variable.name, '') }}:
+ - name: ${{ variable.name }}
+ value: ${{ variable.value }}
+ - ${{ if ne(variable.group, '') }}:
+ - group: ${{ variable.group }}
+
+ - IsInternal: ''
+ - HelixApiAccessToken: ''
+ - HelixPreCommand: ''
+
+ - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if eq( parameters.osGroup, 'Windows_NT') }}:
+ - HelixPreCommand: 'set "PERFLAB_UPLOAD_TOKEN=$(PerfCommandUploadToken)"'
+ - IsInternal: -Internal
+ - ${{ if ne(parameters.osGroup, 'Windows_NT') }}:
+ - HelixPreCommand: 'export PERFLAB_UPLOAD_TOKEN="$(PerfCommandUploadTokenLinux)"'
+ - IsInternal: --internal
+
+ - group: DotNet-HelixApi-Access
+ - group: dotnet-benchview
+
+ workspace:
+ clean: all
+ pool:
+ ${{ parameters.pool }}
+ container: ${{ parameters.container }}
+ strategy:
+ matrix:
+ ${{ each framework in parameters.frameworks }}:
+ ${{ framework }}:
+ _Framework: ${{ framework }}
+ steps:
+ - checkout: self
+ clean: true
+ # Run all of the steps to setup repo
+ - ${{ each step in parameters.steps }}:
+ - ${{ step }}
+ - powershell: $(Build.SourcesDirectory)\eng\common\performance\performance-setup.ps1 $(IsInternal) -Framework $(_Framework) ${{ parameters.extraSetupParameters }}
+ displayName: Performance Setup (Windows)
+ condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $(Build.SourcesDirectory)/eng/common/performance/performance-setup.sh $(IsInternal) --framework $(_Framework) ${{ parameters.extraSetupParameters }}
+ displayName: Performance Setup (Unix)
+ condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $(Python) $(PerformanceDirectory)/scripts/ci_setup.py $(SetupArguments)
+ displayName: Run ci setup script
+ # Run perf testing in helix
+ - template: /eng/common/templates/steps/perf-send-to-helix.yml
+ parameters:
+ HelixSource: '$(HelixSourcePrefix)/$(Build.Repository.Name)/$(Build.SourceBranch)' # sources must start with pr/, official/, prodcon/, or agent/
+ HelixType: 'test/performance/$(Kind)/$(_Framework)/$(Architecture)'
+ HelixAccessToken: $(HelixApiAccessToken)
+ HelixTargetQueues: $(Queue)
+ HelixPreCommands: $(HelixPreCommand)
+ Creator: $(Creator)
+ WorkItemTimeout: 4:00 # 4 hours
+ WorkItemDirectory: '$(WorkItemDirectory)' # WorkItemDirectory cannot be empty, so we send it some docs to keep it happy
+ CorrelationPayloadDirectory: '$(PayloadDirectory)' # it gets checked out to a folder with a shorter path than WorkItemDirectory so we can avoid file-name-too-long exceptions
\ No newline at end of file
diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml
index 620bd3c62e..b722975f9c 100644
--- a/eng/common/templates/job/publish-build-assets.yml
+++ b/eng/common/templates/job/publish-build-assets.yml
@@ -47,6 +47,10 @@ jobs:
downloadPath: '$(Build.StagingDirectory)/Download'
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: NuGetAuthenticate@0
+
- task: PowerShell@2
displayName: Publish Build Assets
inputs:
@@ -59,6 +63,23 @@ jobs:
/p:Configuration=$(_BuildConfig)
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
+
+ - task: powershell@2
+ displayName: Create ReleaseConfigs Artifact
+ inputs:
+ targetType: inline
+ script: |
+ Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId)
+ Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)"
+ Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild)
+
+ - task: PublishBuildArtifacts@1
+ displayName: Publish ReleaseConfigs Artifact
+ inputs:
+ PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt'
+ PublishLocation: Container
+ ArtifactName: ReleaseConfigs
+
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- task: PublishBuildArtifacts@1
displayName: Publish Logs to VSTS
diff --git a/eng/common/templates/jobs/jobs.yml b/eng/common/templates/jobs/jobs.yml
index 6a2f98c036..c08225a9a9 100644
--- a/eng/common/templates/jobs/jobs.yml
+++ b/eng/common/templates/jobs/jobs.yml
@@ -1,19 +1,10 @@
parameters:
- # Optional: 'true' if failures in job.yml job should not fail the job
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
continueOnError: false
- # Optional: Enable installing Microbuild plugin
- # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
- # _TeamName - the name of your team
- # _SignType - 'test' or 'real'
- enableMicrobuild: false
-
# Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
- # Optional: Enable publishing to the build asset registry
- enablePublishBuildAssets: false
-
# Optional: Enable publishing using release pipelines
enablePublishUsingPipelines: false
@@ -23,19 +14,9 @@ parameters:
# Optional: Include toolset dependencies in the generated graph files
includeToolset: false
- # Optional: Include PublishTestResults task
- enablePublishTestResults: false
-
- # Optional: enable sending telemetry
- # if enabled then the 'helixRepo' parameter should also be specified
- enableTelemetry: false
-
# Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
jobs: []
- # Optional: define the helix repo for telemetry (example: 'dotnet/arcade')
- helixRepo: ''
-
# Optional: Override automatically derived dependsOn value for "publish build assets" job
publishBuildAssetsDependsOn: ''
@@ -62,29 +43,30 @@ jobs:
name: ${{ job.job }}
-- ${{ if and(eq(parameters.enablePublishBuildAssets, true), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - template: ../job/publish-build-assets.yml
- parameters:
- continueOnError: ${{ parameters.continueOnError }}
- dependsOn:
- - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.jobs }}:
- - ${{ job.job }}
- pool:
- vmImage: vs2017-win2016
- runAsPublic: ${{ parameters.runAsPublic }}
- publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
- enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
-
-- ${{ if and(eq(parameters.graphFileGeneration.enabled, true), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - template: ../job/generate-graph-files.yml
- parameters:
- continueOnError: ${{ parameters.continueOnError }}
- includeToolset: ${{ parameters.graphFileGeneration.includeToolset }}
- dependsOn:
- - Asset_Registry_Publish
- pool:
- vmImage: vs2017-win2016
+- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
+ - template: ../job/publish-build-assets.yml
+ parameters:
+ continueOnError: ${{ parameters.continueOnError }}
+ dependsOn:
+ - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.jobs }}:
+ - ${{ job.job }}
+ pool:
+ vmImage: vs2017-win2016
+ runAsPublic: ${{ parameters.runAsPublic }}
+ publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
+ enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
+
+ - ${{ if eq(parameters.graphFileGeneration.enabled, true) }}:
+ - template: ../job/generate-graph-files.yml
+ parameters:
+ continueOnError: ${{ parameters.continueOnError }}
+ includeToolset: ${{ parameters.graphFileGeneration.includeToolset }}
+ dependsOn:
+ - Asset_Registry_Publish
+ pool:
+ vmImage: vs2017-win2016
diff --git a/eng/common/templates/post-build/channels/generic-internal-channel.yml b/eng/common/templates/post-build/channels/generic-internal-channel.yml
new file mode 100644
index 0000000000..3a8755fbbb
--- /dev/null
+++ b/eng/common/templates/post-build/channels/generic-internal-channel.yml
@@ -0,0 +1,159 @@
+parameters:
+ artifactsPublishingAdditionalParameters: ''
+ dependsOn:
+ - Validate
+ publishInstallersAndChecksums: false
+ symbolPublishingAdditionalParameters: ''
+ stageName: ''
+ channelName: ''
+ channelId: ''
+ transportFeed: ''
+ shippingFeed: ''
+ symbolsFeed: ''
+
+stages:
+- stage: ${{ parameters.stageName }}
+ dependsOn: ${{ parameters.dependsOn }}
+ variables:
+ - template: ../common-variables.yml
+ displayName: ${{ parameters.channelName }} Publishing
+ jobs:
+ - template: ../setup-maestro-vars.yml
+
+ - job: publish_symbols
+ displayName: Symbol Publishing
+ dependsOn: setupMaestroVars
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.channelId }} ))
+ variables:
+ - group: DotNet-Symbol-Server-Pats
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ artifactName: 'BlobArtifacts'
+ continueOnError: true
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download PDB Artifacts
+ inputs:
+ artifactName: 'PDBArtifacts'
+ continueOnError: true
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
+ # otherwise it'll complain about accessing a private feed.
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - task: PowerShell@2
+ displayName: Publish
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishToSymbolServers -restore -msbuildEngine dotnet
+ /p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat)
+ /p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat)
+ /p:PDBArtifactsDirectory='$(Build.ArtifactStagingDirectory)/PDBArtifacts/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
+ /p:Configuration=Release
+ /p:PublishToMSDL=false
+ ${{ parameters.symbolPublishingAdditionalParameters }}
+
+ - template: ../../steps/publish-logs.yml
+ parameters:
+ StageLabel: '${{ parameters.stageName }}'
+ JobLabel: 'SymbolPublishing'
+
+ - job: publish_assets
+ displayName: Publish Assets
+ dependsOn: setupMaestroVars
+ variables:
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+ - name: IsStableBuild
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ]
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.channelId }}))
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: current
+ artifactName: PackageArtifacts
+ continueOnError: true
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: current
+ artifactName: BlobArtifacts
+ continueOnError: true
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Asset Manifests
+ inputs:
+ buildType: current
+ artifactName: AssetManifests
+
+ - task: NuGetToolInstaller@1
+ displayName: 'Install NuGet.exe'
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - task: PowerShell@2
+ displayName: Publish Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet
+ /p:IsStableBuild=$(IsStableBuild)
+ /p:IsInternalBuild=$(IsInternalBuild)
+ /p:RepositoryName=$(Build.Repository.Name)
+ /p:CommitSha=$(Build.SourceVersion)
+ /p:NugetPath=$(NuGetExeToolPath)
+ /p:AzdoTargetFeedPAT='$(dn-bot-dnceng-universal-packages-rw)'
+ /p:AzureStorageTargetFeedPAT='$(dotnetfeed-storage-access-key-1)'
+ /p:BARBuildId=$(BARBuildId)
+ /p:MaestroApiEndpoint='$(MaestroApiEndPoint)'
+ /p:BuildAssetRegistryToken='$(MaestroApiAccessToken)'
+ /p:ManifestsBasePath='$(Build.ArtifactStagingDirectory)/AssetManifests/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/'
+ /p:Configuration=Release
+ /p:PublishInstallersAndChecksums=true
+ /p:ChecksumsTargetStaticFeed=$(InternalChecksumsBlobFeedUrl)
+ /p:ChecksumsAzureAccountKey=$(InternalChecksumsBlobFeedKey)
+ /p:InstallersTargetStaticFeed=$(InternalInstallersBlobFeedUrl)
+ /p:InstallersAzureAccountKey=$(InternalInstallersBlobFeedKey)
+ /p:AzureDevOpsStaticShippingFeed='${{ parameters.shippingFeed }}'
+ /p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticTransportFeed='${{ parameters.transportFeed }}'
+ /p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticSymbolsFeed='${{ parameters.symbolsFeed }}'
+ /p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:PublishToMSDL=false
+ ${{ parameters.artifactsPublishingAdditionalParameters }}
+
+ - template: ../../steps/publish-logs.yml
+ parameters:
+ StageLabel: '${{ parameters.stageName }}'
+ JobLabel: 'AssetsPublishing'
+
+ - template: ../../steps/promote-build.yml
+ parameters:
+ ChannelId: ${{ parameters.channelId }}
diff --git a/eng/common/templates/post-build/channels/generic-public-channel.yml b/eng/common/templates/post-build/channels/generic-public-channel.yml
new file mode 100644
index 0000000000..3f572f8b12
--- /dev/null
+++ b/eng/common/templates/post-build/channels/generic-public-channel.yml
@@ -0,0 +1,159 @@
+parameters:
+ artifactsPublishingAdditionalParameters: ''
+ dependsOn:
+ - Validate
+ publishInstallersAndChecksums: false
+ symbolPublishingAdditionalParameters: ''
+ stageName: ''
+ channelName: ''
+ channelId: ''
+ transportFeed: ''
+ shippingFeed: ''
+ symbolsFeed: ''
+
+stages:
+- stage: ${{ parameters.stageName }}
+ dependsOn: ${{ parameters.dependsOn }}
+ variables:
+ - template: ../common-variables.yml
+ displayName: ${{ parameters.channelName }} Publishing
+ jobs:
+ - template: ../setup-maestro-vars.yml
+
+ - job: publish_symbols
+ displayName: Symbol Publishing
+ dependsOn: setupMaestroVars
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.channelId }} ))
+ variables:
+ - group: DotNet-Symbol-Server-Pats
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ artifactName: 'BlobArtifacts'
+ continueOnError: true
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download PDB Artifacts
+ inputs:
+ artifactName: 'PDBArtifacts'
+ continueOnError: true
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
+ # otherwise it'll complain about accessing a private feed.
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - task: PowerShell@2
+ displayName: Publish
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishToSymbolServers -restore -msbuildEngine dotnet
+ /p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat)
+ /p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat)
+ /p:PDBArtifactsDirectory='$(Build.ArtifactStagingDirectory)/PDBArtifacts/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
+ /p:Configuration=Release
+ ${{ parameters.symbolPublishingAdditionalParameters }}
+
+ - template: ../../steps/publish-logs.yml
+ parameters:
+ StageLabel: '${{ parameters.stageName }}'
+ JobLabel: 'SymbolPublishing'
+
+ - job: publish_assets
+ displayName: Publish Assets
+ dependsOn: setupMaestroVars
+ variables:
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+ - name: IsStableBuild
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ]
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.channelId }}))
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: current
+ artifactName: PackageArtifacts
+ continueOnError: true
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: current
+ artifactName: BlobArtifacts
+ continueOnError: true
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Asset Manifests
+ inputs:
+ buildType: current
+ artifactName: AssetManifests
+
+ - task: NuGetToolInstaller@1
+ displayName: 'Install NuGet.exe'
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - task: PowerShell@2
+ displayName: Publish Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet
+ /p:ArtifactsCategory=$(_DotNetArtifactsCategory)
+ /p:IsStableBuild=$(IsStableBuild)
+ /p:IsInternalBuild=$(IsInternalBuild)
+ /p:RepositoryName=$(Build.Repository.Name)
+ /p:CommitSha=$(Build.SourceVersion)
+ /p:NugetPath=$(NuGetExeToolPath)
+ /p:AzdoTargetFeedPAT='$(dn-bot-dnceng-universal-packages-rw)'
+ /p:AzureStorageTargetFeedPAT='$(dotnetfeed-storage-access-key-1)'
+ /p:BARBuildId=$(BARBuildId)
+ /p:MaestroApiEndpoint='$(MaestroApiEndPoint)'
+ /p:BuildAssetRegistryToken='$(MaestroApiAccessToken)'
+ /p:ManifestsBasePath='$(Build.ArtifactStagingDirectory)/AssetManifests/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/'
+ /p:Configuration=Release
+ /p:PublishInstallersAndChecksums=${{ parameters.publishInstallersAndChecksums }}
+ /p:InstallersTargetStaticFeed=$(InstallersBlobFeedUrl)
+ /p:InstallersAzureAccountKey=$(dotnetcli-storage-key)
+ /p:ChecksumsTargetStaticFeed=$(ChecksumsBlobFeedUrl)
+ /p:ChecksumsAzureAccountKey=$(dotnetclichecksums-storage-key)
+ /p:AzureDevOpsStaticShippingFeed='${{ parameters.shippingFeed }}'
+ /p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticTransportFeed='${{ parameters.transportFeed }}'
+ /p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticSymbolsFeed='${{ parameters.symbolsFeed }}'
+ /p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ ${{ parameters.artifactsPublishingAdditionalParameters }}
+
+ - template: ../../steps/publish-logs.yml
+ parameters:
+ StageLabel: '${{ parameters.stageName }}'
+ JobLabel: 'AssetsPublishing'
+
+ - template: ../../steps/promote-build.yml
+ parameters:
+ ChannelId: ${{ parameters.channelId }}
diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml
new file mode 100644
index 0000000000..9505cf170f
--- /dev/null
+++ b/eng/common/templates/post-build/common-variables.yml
@@ -0,0 +1,92 @@
+variables:
+ - group: AzureDevOps-Artifact-Feeds-Pats
+ - group: DotNet-Blob-Feed
+ - group: DotNet-DotNetCli-Storage
+ - group: DotNet-MSRC-Storage
+ - group: Publish-Build-Assets
+
+ # .NET Core 3.1 Dev
+ - name: PublicDevRelease_31_Channel_Id
+ value: 128
+
+ # .NET Core 5 Dev
+ - name: NetCore_5_Dev_Channel_Id
+ value: 131
+
+ # .NET Eng - Validation
+ - name: Net_Eng_Validation_Channel_Id
+ value: 9
+
+ # .NET Eng - Latest
+ - name: Net_Eng_Latest_Channel_Id
+ value: 2
+
+ # .NET 3 Eng - Validation
+ - name: NET_3_Eng_Validation_Channel_Id
+ value: 390
+
+ # .NET 3 Eng
+ - name: NetCore_3_Tools_Channel_Id
+ value: 344
+
+ # .NET Core 3.0 Internal Servicing
+ - name: InternalServicing_30_Channel_Id
+ value: 184
+
+ # .NET Core 3.0 Release
+ - name: PublicRelease_30_Channel_Id
+ value: 19
+
+ # .NET Core 3.1 Release
+ - name: PublicRelease_31_Channel_Id
+ value: 129
+
+ # General Testing
+ - name: GeneralTesting_Channel_Id
+ value: 529
+
+ # .NET Core 3.1 Blazor Features
+ - name: NetCore_31_Blazor_Features_Channel_Id
+ value: 531
+
+ # .NET Core Experimental
+ - name: NetCore_Experimental_Channel_Id
+ value: 562
+
+ # Whether the build is internal or not
+ - name: IsInternalBuild
+ value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
+
+ # Default Maestro++ API Endpoint and API Version
+ - name: MaestroApiEndPoint
+ value: "https://maestro-prod.westus2.cloudapp.azure.com"
+ - name: MaestroApiAccessToken
+ value: $(MaestroAccessToken)
+ - name: MaestroApiVersion
+ value: "2019-01-16"
+
+ - name: SourceLinkCLIVersion
+ value: 3.0.0
+ - name: SymbolToolVersion
+ value: 1.0.1
+
+ # Feed Configurations
+ # These should include the suffix "/index.json"
+
+ # Default locations for Installers and checksums
+ # Public Locations
+ - name: ChecksumsBlobFeedUrl
+ value: https://dotnetclichecksums.blob.core.windows.net/dotnet/index.json
+ - name: InstallersBlobFeedUrl
+ value: https://dotnetcli.blob.core.windows.net/dotnet/index.json
+
+ # Private Locations
+ - name: InternalChecksumsBlobFeedUrl
+ value: https://dotnetclichecksumsmsrc.blob.core.windows.net/dotnet/index.json
+ - name: InternalChecksumsBlobFeedKey
+ value: $(dotnetclichecksumsmsrc-storage-key)
+
+ - name: InternalInstallersBlobFeedUrl
+ value: https://dotnetclimsrc.blob.core.windows.net/dotnet/index.json
+ - name: InternalInstallersBlobFeedKey
+ value: $(dotnetclimsrc-access-key)
diff --git a/eng/common/templates/post-build/darc-gather-drop.yml b/eng/common/templates/post-build/darc-gather-drop.yml
new file mode 100644
index 0000000000..3268ccaa55
--- /dev/null
+++ b/eng/common/templates/post-build/darc-gather-drop.yml
@@ -0,0 +1,23 @@
+parameters:
+ ChannelId: 0
+
+jobs:
+- job: gatherDrop
+ displayName: Gather Drop
+ dependsOn: setupMaestroVars
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.ChannelId }}))
+ variables:
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: PowerShell@2
+ displayName: Darc gather-drop
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/darc-gather-drop.ps1
+ arguments: -BarBuildId $(BARBuildId)
+ -DropLocation $(Agent.BuildDirectory)/Temp/Drop/
+ -MaestroApiAccessToken $(MaestroApiAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml
new file mode 100644
index 0000000000..8a8d84f202
--- /dev/null
+++ b/eng/common/templates/post-build/post-build.yml
@@ -0,0 +1,223 @@
+parameters:
+ enableSourceLinkValidation: false
+ enableSigningValidation: true
+ enableSymbolValidation: false
+ enableNugetValidation: true
+ publishInstallersAndChecksums: false
+ SDLValidationParameters:
+ enable: false
+ continueOnError: false
+ params: ''
+ artifactNames: ''
+
+ # These parameters let the user customize the call to sdk-task.ps1 for publishing
+ # symbols & general artifacts as well as for signing validation
+ symbolPublishingAdditionalParameters: ''
+ artifactsPublishingAdditionalParameters: ''
+ signingValidationAdditionalParameters: ''
+
+ # Which stages should finish execution before post-build stages start
+ validateDependsOn:
+ - build
+ publishDependsOn:
+ - Validate
+
+stages:
+- stage: Validate
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Validate
+ jobs:
+ - ${{ if eq(parameters.enableNugetValidation, 'true') }}:
+ - job:
+ displayName: NuGet Validation
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: current
+ artifactName: PackageArtifacts
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
+ arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
+ -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
+
+ - ${{ if eq(parameters.enableSigningValidation, 'true') }}:
+ - job:
+ displayName: Signing Validation
+ variables:
+ - template: common-variables.yml
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: current
+ artifactName: PackageArtifacts
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
+ # otherwise it'll complain about accessing a private feed.
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task SigningValidation -restore -msbuildEngine dotnet
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
+ /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
+ ${{ parameters.signingValidationAdditionalParameters }}
+
+ - template: ../steps/publish-logs.yml
+ parameters:
+ StageLabel: 'Validation'
+ JobLabel: 'Signing'
+
+ - ${{ if eq(parameters.enableSourceLinkValidation, 'true') }}:
+ - job:
+ displayName: SourceLink Validation
+ variables:
+ - template: common-variables.yml
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: current
+ artifactName: BlobArtifacts
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
+ arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
+ -ExtractPath $(Agent.BuildDirectory)/Extract/
+ -GHRepoName $(Build.Repository.Name)
+ -GHCommit $(Build.SourceVersion)
+ -SourcelinkCliVersion $(SourceLinkCLIVersion)
+ continueOnError: true
+
+ - ${{ if eq(parameters.SDLValidationParameters.enable, 'true') }}:
+ - template: /eng/common/templates/job/execute-sdl.yml
+ parameters:
+ additionalParameters: ${{ parameters.SDLValidationParameters.params }}
+ continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }}
+ artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }}
+
+- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NetCore_Dev5_Publish'
+ channelName: '.NET Core 5 Dev'
+ channelId: 131
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'Net_Eng_Latest_Publish'
+ channelName: '.NET Eng - Latest'
+ channelId: 2
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'Net_Eng_Validation_Publish'
+ channelName: '.NET Eng - Validation'
+ channelId: 9
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'General_Testing_Publish'
+ channelName: 'General Testing'
+ channelId: 529
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_Tooling_Dev_Publishing'
+ channelName: '.NET Core Tooling Dev'
+ channelId: 548
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_Tooling_Release_Publishing'
+ channelName: '.NET Core Tooling Release'
+ channelId: 549
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NET_Internal_Tooling_Publishing'
+ channelName: '.NET Internal Tooling'
+ channelId: 551
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_Experimental_Publishing'
+ channelName: '.NET Core Experimental'
+ channelId: 562
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental-symbols/nuget/v3/index.json'
diff --git a/eng/common/templates/post-build/promote-build.yml b/eng/common/templates/post-build/promote-build.yml
new file mode 100644
index 0000000000..6b479c3b82
--- /dev/null
+++ b/eng/common/templates/post-build/promote-build.yml
@@ -0,0 +1,25 @@
+parameters:
+ ChannelId: 0
+
+jobs:
+- job:
+ displayName: Promote Build
+ dependsOn: setupMaestroVars
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.ChannelId }}))
+ variables:
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+ - name: ChannelId
+ value: ${{ parameters.ChannelId }}
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: PowerShell@2
+ displayName: Add Build to Channel
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/promote-build.ps1
+ arguments: -BuildId $(BARBuildId)
+ -ChannelId $(ChannelId)
+ -MaestroApiAccessToken $(MaestroApiAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/post-build/setup-maestro-vars.yml b/eng/common/templates/post-build/setup-maestro-vars.yml
new file mode 100644
index 0000000000..716b53f740
--- /dev/null
+++ b/eng/common/templates/post-build/setup-maestro-vars.yml
@@ -0,0 +1,40 @@
+jobs:
+- job: setupMaestroVars
+ displayName: Setup Maestro Vars
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - checkout: none
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Release Configs
+ inputs:
+ buildType: current
+ artifactName: ReleaseConfigs
+
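+  # The script below assumes the ReleaseConfigs artifact contains ReleaseConfigs.txt
+  # with, in order: the BAR build id, the initial channel ids (each written as "[id]",
+  # which is what the stage conditions match on), and the stable-build flag.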
+ - task: PowerShell@2
+ name: setReleaseVars
+ displayName: Set Release Configs Vars
+ inputs:
+ targetType: inline
+ script: |
+ try {
+ $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
+
+ $BarId = $Content | Select -Index 0
+
+ $Channels = ""
+ $Content | Select -Index 1 | ForEach-Object { $Channels += "$_ ," }
+
+ $IsStableBuild = $Content | Select -Index 2
+
+ Write-Host "##vso[task.setvariable variable=BARBuildId;isOutput=true]$BarId"
+ Write-Host "##vso[task.setvariable variable=InitialChannels;isOutput=true]$Channels"
+ Write-Host "##vso[task.setvariable variable=IsStableBuild;isOutput=true]$IsStableBuild"
+ }
+ catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ exit 1
+ }
diff --git a/eng/common/templates/post-build/trigger-subscription.yml b/eng/common/templates/post-build/trigger-subscription.yml
new file mode 100644
index 0000000000..da669030da
--- /dev/null
+++ b/eng/common/templates/post-build/trigger-subscription.yml
@@ -0,0 +1,13 @@
+parameters:
+ ChannelId: 0
+
+steps:
+- task: PowerShell@2
+ displayName: Triggering subscriptions
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1
+ arguments: -SourceRepo $(Build.Repository.Uri)
+ -ChannelId ${{ parameters.ChannelId }}
+ -MaestroApiAccessToken $(MaestroAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/steps/perf-send-to-helix.yml b/eng/common/templates/steps/perf-send-to-helix.yml
new file mode 100644
index 0000000000..b3ea9acf1f
--- /dev/null
+++ b/eng/common/templates/steps/perf-send-to-helix.yml
@@ -0,0 +1,66 @@
+# Please remember to update the documentation if you make changes to these parameters!
+parameters:
+ HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
+ HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
+ HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
+ HelixTargetQueues: '' # required -- semicolon delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
+ HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
+ HelixPreCommands: '' # optional -- commands to run before Helix work item execution
+ HelixPostCommands: '' # optional -- commands to run after Helix work item execution
+ WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
+ CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
+ IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
+ DotNetCliPackageType: '' # optional -- either 'sdk' or 'runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
+ DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
+ EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control
+ WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
+ Creator: '' # optional -- if the build is external, use this to specify who is sending the job
+ DisplayNamePrefix: 'Send job to Helix' # optional -- rename the beginning of the displayName of the steps in AzDO
+ condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
+ continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
+
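+# Illustrative usage from a consuming job (the queue name and Creator below are
+# placeholders, not values this template requires):
+#
+#   steps:
+#   - template: /eng/common/templates/steps/perf-send-to-helix.yml
+#     parameters:
+#       HelixSource: 'pr/default'
+#       HelixType: 'tests/default/'
+#       HelixTargetQueues: 'SomeQueue.Amd64.Open'
+#       Creator: 'someuser'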
+steps:
+ - powershell: $(Build.SourcesDirectory)\eng\common\msbuild.ps1 $(Build.SourcesDirectory)\eng\common\performance\perfhelixpublish.proj /restore /t:Test /bl:$(Build.SourcesDirectory)\artifacts\log\$env:BuildConfig\SendToHelix.binlog
+ displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/performance/perfhelixpublish.proj /restore /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
+ displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
diff --git a/eng/common/templates/steps/promote-build.yml b/eng/common/templates/steps/promote-build.yml
new file mode 100644
index 0000000000..b90404435d
--- /dev/null
+++ b/eng/common/templates/steps/promote-build.yml
@@ -0,0 +1,13 @@
+parameters:
+ ChannelId: 0
+
+steps:
+- task: PowerShell@2
+ displayName: Add Build to Channel
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/promote-build.ps1
+ arguments: -BuildId $(BARBuildId)
+ -ChannelId ${{ parameters.ChannelId }}
+ -MaestroApiAccessToken $(MaestroApiAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/steps/publish-logs.yml b/eng/common/templates/steps/publish-logs.yml
new file mode 100644
index 0000000000..8903ba57c0
--- /dev/null
+++ b/eng/common/templates/steps/publish-logs.yml
@@ -0,0 +1,23 @@
+parameters:
+ StageLabel: ''
+ JobLabel: ''
+
+steps:
+- task: Powershell@2
+ displayName: Prepare Binlogs to Upload
+ inputs:
+ targetType: inline
+ script: |
+ New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ continueOnError: true
+ condition: always()
+
+- task: PublishBuildArtifacts@1
+ displayName: Publish Logs
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
+ PublishLocation: Container
+    ArtifactName: PostBuildLogs
+ continueOnError: true
+ condition: always()
diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml
index 05df886f55..30becf01ea 100644
--- a/eng/common/templates/steps/send-to-helix.yml
+++ b/eng/common/templates/steps/send-to-helix.yml
@@ -23,6 +23,7 @@ parameters:
EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control
WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
+ HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting int)
Creator: '' # optional -- if the build is external, use this to specify who is sending the job
DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
@@ -55,6 +56,7 @@ steps:
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
Creator: ${{ parameters.Creator }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
@@ -85,6 +87,7 @@ steps:
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
Creator: ${{ parameters.Creator }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
index e3b5fe2dfd..d762c9f044 100644
--- a/eng/common/tools.ps1
+++ b/eng/common/tools.ps1
@@ -1,13 +1,11 @@
# Initialize variables if they aren't already defined.
# These may be defined as parameters of the importing script, or set after importing this script.
-[string]$architecture = if (Test-Path variable:architecture) { $architecture } else { "" }
-
# CI mode - set to true on CI server for PR validation build or official build.
[bool]$ci = if (Test-Path variable:ci) { $ci } else { $false }
# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names.
-[string]$configuration = if (Test-Path variable:configuration) { $configuration } else { "Debug" }
+[string]$configuration = if (Test-Path variable:configuration) { $configuration } else { 'Debug' }
# Set to true to output binary log from msbuild. Note that emitting binary log slows down the build.
# Binary log must be enabled on CI.
@@ -26,7 +24,7 @@
[bool]$restore = if (Test-Path variable:restore) { $restore } else { $true }
# Adjusts msbuild verbosity level.
-[string]$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { "minimal" }
+[string]$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { 'minimal' }
# Set to true to reuse msbuild nodes. Recommended to not reuse on CI.
[bool]$nodeReuse = if (Test-Path variable:nodeReuse) { $nodeReuse } else { !$ci }
@@ -41,19 +39,25 @@
# installed on the machine instead of downloading one.
[bool]$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true }
+# Enable repos to use a particular version of the on-line dotnet-install scripts.
+# default URL: https://dot.net/v1/dotnet-install.ps1
+[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' }
+
# True to use global NuGet cache instead of restoring packages to repository-local directory.
[bool]$useGlobalNuGetCache = if (Test-Path variable:useGlobalNuGetCache) { $useGlobalNuGetCache } else { !$ci }
# An array of names of processes to stop on script exit if prepareMachine is true.
-$processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @("msbuild", "dotnet", "vbcscompiler") }
+$processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @('msbuild', 'dotnet', 'vbcscompiler') }
+
+$disableConfigureToolsetImport = if (Test-Path variable:disableConfigureToolsetImport) { $disableConfigureToolsetImport } else { $null }
set-strictmode -version 2.0
-$ErrorActionPreference = "Stop"
+$ErrorActionPreference = 'Stop'
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
function Create-Directory([string[]] $path) {
if (!(Test-Path $path)) {
- New-Item -path $path -force -itemType "Directory" | Out-Null
+ New-Item -path $path -force -itemType 'Directory' | Out-Null
}
}
@@ -86,7 +90,7 @@ function Exec-Process([string]$command, [string]$commandArgs) {
return $global:LASTEXITCODE = $process.ExitCode
}
finally {
- # If we didn't finish then an error occured or the user hit ctrl-c. Either
+ # If we didn't finish then an error occurred or the user hit ctrl-c. Either
# way kill the process
if (-not $finished) {
$process.Kill()
@@ -94,68 +98,6 @@ function Exec-Process([string]$command, [string]$commandArgs) {
}
}
-function Write-PipelineTaskError {
- [CmdletBinding()]
- param(
- [Parameter(Mandatory = $true)]
- [string]$Message,
- [Parameter(Mandatory = $false)]
- [string]$Type = 'error',
- [string]$ErrCode,
- [string]$SourcePath,
- [string]$LineNumber,
- [string]$ColumnNumber,
- [switch]$AsOutput)
-
- if(!$ci) {
- if($Type -eq 'error') {
- Write-Host $Message -ForegroundColor Red
- return
- }
- elseif ($Type -eq 'warning') {
- Write-Host $Message -ForegroundColor Yellow
- return
- }
- }
-
- if(($Type -ne 'error') -and ($Type -ne 'warning')) {
- Write-Host $Message
- return
- }
- if(-not $PSBoundParameters.ContainsKey('Type')) {
- $PSBoundParameters.Add('Type', 'error')
- }
- Write-LogIssue @PSBoundParameters
-}
-
-function Write-PipelineSetVariable {
- [CmdletBinding()]
- param(
- [Parameter(Mandatory = $true)]
- [string]$Name,
- [string]$Value,
- [switch]$Secret,
- [switch]$AsOutput)
-
- if($ci) {
- Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $Value -Properties @{
- 'variable' = $Name
- 'issecret' = $Secret
- } -AsOutput:$AsOutput
- }
-}
-
-function Write-PipelinePrependPath {
- [CmdletBinding()]
- param(
- [Parameter(Mandatory=$true)]
- [string]$Path,
- [switch]$AsOutput)
- if($ci) {
- Write-LoggingCommand -Area 'task' -Event 'prependpath' -Data $Path -AsOutput:$AsOutput
- }
-}
-
function InitializeDotNetCli([bool]$install) {
if (Test-Path variable:global:_DotNetInstallDir) {
return $global:_DotNetInstallDir
@@ -179,7 +121,7 @@ function InitializeDotNetCli([bool]$install) {
# Find the first path on %PATH% that contains the dotnet.exe
if ($useInstalledDotNetCli -and (-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -eq $null)) {
- $dotnetCmd = Get-Command "dotnet.exe" -ErrorAction SilentlyContinue
+ $dotnetCmd = Get-Command 'dotnet.exe' -ErrorAction SilentlyContinue
if ($dotnetCmd -ne $null) {
$env:DOTNET_INSTALL_DIR = Split-Path $dotnetCmd.Path -Parent
}
@@ -192,13 +134,13 @@ function InitializeDotNetCli([bool]$install) {
if ((-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -ne $null) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) {
$dotnetRoot = $env:DOTNET_INSTALL_DIR
} else {
- $dotnetRoot = Join-Path $RepoRoot ".dotnet"
+ $dotnetRoot = Join-Path $RepoRoot '.dotnet'
if (-not (Test-Path(Join-Path $dotnetRoot "sdk\$dotnetSdkVersion"))) {
if ($install) {
- InstallDotNetSdk $dotnetRoot $dotnetSdkVersion $architecture
+ InstallDotNetSdk $dotnetRoot $dotnetSdkVersion
} else {
- Write-PipelineTaskError "Unable to find dotnet with SDK version '$dotnetSdkVersion'"
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to find dotnet with SDK version '$dotnetSdkVersion'"
ExitWithExitCode 1
}
}
@@ -211,8 +153,9 @@ function InitializeDotNetCli([bool]$install) {
# It also ensures that VS msbuild will use the downloaded sdk targets.
$env:PATH = "$dotnetRoot;$env:PATH"
- # Make Sure that our bootstrapped dotnet cli is avaliable in future steps of the Azure Pipelines build
+ # Make Sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build
Write-PipelinePrependPath -Path $dotnetRoot
+
Write-PipelineSetVariable -Name 'DOTNET_MULTILEVEL_LOOKUP' -Value '0'
Write-PipelineSetVariable -Name 'DOTNET_SKIP_FIRST_TIME_EXPERIENCE' -Value '1'
@@ -220,44 +163,61 @@ function InitializeDotNetCli([bool]$install) {
}
function GetDotNetInstallScript([string] $dotnetRoot) {
- $installScript = Join-Path $dotnetRoot "dotnet-install.ps1"
+ $installScript = Join-Path $dotnetRoot 'dotnet-install.ps1'
if (!(Test-Path $installScript)) {
Create-Directory $dotnetRoot
- Invoke-WebRequest "https://dot.net/v1/dotnet-install.ps1" -OutFile $installScript
+ $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
+ Invoke-WebRequest "https://dot.net/$dotnetInstallScriptVersion/dotnet-install.ps1" -OutFile $installScript
}
return $installScript
}
-function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = "") {
+function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = '') {
InstallDotNet $dotnetRoot $version $architecture
}
-function InstallDotNet([string] $dotnetRoot, [string] $version, [string] $architecture = "", [string] $runtime = "", [bool] $skipNonVersionedFiles = $false) {
-  $installScript = GetDotNetInstallScript $dotnetRoot
+function InstallDotNet([string] $dotnetRoot,
+ [string] $version,
+ [string] $architecture = '',
+ [string] $runtime = '',
+ [bool] $skipNonVersionedFiles = $false,
+ [string] $runtimeSourceFeed = '',
+ [string] $runtimeSourceFeedKey = '') {
+
$installScript = GetDotNetInstallScript $dotnetRoot
$installParameters = @{
Version = $version
InstallDir = $dotnetRoot
}
- if ($architecture) {
- if ($architecture -eq "arm") {
- $architecture="x86"
- }
- if ($architecture -eq "arm64") {
- $architecture="x64"
- }
- $installParameters.Architecture = $architecture
+ if ($architecture) { $installParameters.Architecture = $architecture }
+ if ($runtime) { $installParameters.Runtime = $runtime }
+ if ($skipNonVersionedFiles) { $installParameters.SkipNonVersionedFiles = $skipNonVersionedFiles }
+
+ try {
+ & $installScript @installParameters
}
+ catch {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install dotnet runtime '$runtime' from public location."
+ # Only the runtime can be installed from a custom [private] location.
+ if ($runtime -and ($runtimeSourceFeed -or $runtimeSourceFeedKey)) {
+ if ($runtimeSourceFeed) { $installParameters.AzureFeed = $runtimeSourceFeed }
- if ($runtime) { $installParameters.Runtime = $runtime }
- if ($skipNonVersionedFiles) { $installParameters.SkipNonVersionedFiles = $skipNonVersionedFiles }
+ if ($runtimeSourceFeedKey) {
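+        # The feed key is expected to be base64-encoded; decode it into the
+        # FeedCredential value handed to dotnet-install.ps1.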
+ $decodedBytes = [System.Convert]::FromBase64String($runtimeSourceFeedKey)
+ $decodedString = [System.Text.Encoding]::UTF8.GetString($decodedBytes)
+ $installParameters.FeedCredential = $decodedString
+ }
- & $installScript @installParameters
- if ($lastExitCode -ne 0) {
- Write-PipelineTaskError -Message "Failed to install dotnet cli (exit code '$lastExitCode')."
- ExitWithExitCode $lastExitCode
+ try {
+ & $installScript @installParameters
+ }
+ catch {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install dotnet runtime '$runtime' from custom location '$runtimeSourceFeed'."
+ }
+ }
}
}
@@ -278,16 +238,16 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
}
if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
- $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { "15.9" }
+ $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { '15.9' }
$vsMinVersion = [Version]::new($vsMinVersionStr)
# Try msbuild command available in the environment.
if ($env:VSINSTALLDIR -ne $null) {
- $msbuildCmd = Get-Command "msbuild.exe" -ErrorAction SilentlyContinue
+ $msbuildCmd = Get-Command 'msbuild.exe' -ErrorAction SilentlyContinue
if ($msbuildCmd -ne $null) {
# Workaround for https://github.com/dotnet/roslyn/issues/35793
# Due to this issue $msbuildCmd.Version returns 0.0.0.0 for msbuild.exe 16.2+
- $msbuildVersion = [Version]::new((Get-Item $msbuildCmd.Path).VersionInfo.ProductVersion.Split(@('-', '+'))[0])
+ $msbuildVersion = [Version]::new((Get-Item $msbuildCmd.Path).VersionInfo.ProductVersion.Split([char[]]@('-', '+'))[0])
if ($msbuildVersion -ge $vsMinVersion) {
return $global:_MSBuildExe = $msbuildCmd.Path
@@ -307,7 +267,7 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
InitializeVisualStudioEnvironmentVariables $vsInstallDir $vsMajorVersion
} else {
- if (Get-Member -InputObject $GlobalJson.tools -Name "xcopy-msbuild") {
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'xcopy-msbuild') {
$xcopyMSBuildVersion = $GlobalJson.tools.'xcopy-msbuild'
$vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0]
} else {
@@ -317,7 +277,7 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
$vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install
if ($vsInstallDir -eq $null) {
- throw "Unable to find Visual Studio that has required version and components installed"
+ throw 'Unable to find Visual Studio that has required version and components installed'
}
}
@@ -341,7 +301,7 @@ function InstallXCopyMSBuild([string]$packageVersion) {
}
function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
- $packageName = "RoslynTools.MSBuild"
+ $packageName = 'RoslynTools.MSBuild'
$packageDir = Join-Path $ToolsDir "msbuild\$packageVersion"
$packagePath = Join-Path $packageDir "$packageName.$packageVersion.nupkg"
@@ -352,11 +312,12 @@ function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
Create-Directory $packageDir
Write-Host "Downloading $packageName $packageVersion"
+ $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
Invoke-WebRequest "https://dotnet.myget.org/F/roslyn-tools/api/v2/package/$packageName/$packageVersion/" -OutFile $packagePath
Unzip $packagePath $packageDir
}
- return Join-Path $packageDir "tools"
+ return Join-Path $packageDir 'tools'
}
#
@@ -373,32 +334,32 @@ function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
# or $null if no instance meeting the requirements is found on the machine.
#
function LocateVisualStudio([object]$vsRequirements = $null){
- if (Get-Member -InputObject $GlobalJson.tools -Name "vswhere") {
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'vswhere') {
$vswhereVersion = $GlobalJson.tools.vswhere
} else {
- $vswhereVersion = "2.5.2"
+ $vswhereVersion = '2.5.2'
}
$vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion"
- $vsWhereExe = Join-Path $vsWhereDir "vswhere.exe"
+ $vsWhereExe = Join-Path $vsWhereDir 'vswhere.exe'
if (!(Test-Path $vsWhereExe)) {
Create-Directory $vsWhereDir
- Write-Host "Downloading vswhere"
+ Write-Host 'Downloading vswhere'
Invoke-WebRequest "https://github.com/Microsoft/vswhere/releases/download/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
}
if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
- $args = @("-latest", "-prerelease", "-format", "json", "-requires", "Microsoft.Component.MSBuild")
+ $args = @('-latest', '-prerelease', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*')
- if (Get-Member -InputObject $vsRequirements -Name "version") {
- $args += "-version"
+ if (Get-Member -InputObject $vsRequirements -Name 'version') {
+ $args += '-version'
$args += $vsRequirements.version
}
- if (Get-Member -InputObject $vsRequirements -Name "components") {
+ if (Get-Member -InputObject $vsRequirements -Name 'components') {
foreach ($component in $vsRequirements.components) {
- $args += "-requires"
+ $args += '-requires'
$args += $component
}
}
@@ -424,28 +385,27 @@ function InitializeBuildTool() {
# Initialize dotnet cli if listed in 'tools'
$dotnetRoot = $null
- if (Get-Member -InputObject $GlobalJson.tools -Name "dotnet") {
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') {
$dotnetRoot = InitializeDotNetCli -install:$restore
}
- if ($msbuildEngine -eq "dotnet") {
+ if ($msbuildEngine -eq 'dotnet') {
if (!$dotnetRoot) {
- Write-PipelineTaskError "/global.json must specify 'tools.dotnet'."
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "/global.json must specify 'tools.dotnet'."
ExitWithExitCode 1
}
-
- $buildTool = @{ Path = Join-Path $dotnetRoot "dotnet.exe"; Command = "msbuild"; Tool = "dotnet"; Framework = "netcoreapp2.1" }
+ $buildTool = @{ Path = Join-Path $dotnetRoot 'dotnet.exe'; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'netcoreapp2.1' }
} elseif ($msbuildEngine -eq "vs") {
try {
$msbuildPath = InitializeVisualStudioMSBuild -install:$restore
} catch {
- Write-PipelineTaskError $_
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
ExitWithExitCode 1
}
$buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472" }
} else {
- Write-PipelineTaskError "Unexpected value of -msbuildEngine: '$msbuildEngine'."
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'."
ExitWithExitCode 1
}
@@ -454,15 +414,15 @@ function InitializeBuildTool() {
function GetDefaultMSBuildEngine() {
# Presence of tools.vs indicates the repo needs to build using VS msbuild on Windows.
- if (Get-Member -InputObject $GlobalJson.tools -Name "vs") {
- return "vs"
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {
+ return 'vs'
}
- if (Get-Member -InputObject $GlobalJson.tools -Name "dotnet") {
- return "dotnet"
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') {
+ return 'dotnet'
}
- Write-PipelineTaskError "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'."
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'."
ExitWithExitCode 1
}
@@ -471,9 +431,9 @@ function GetNuGetPackageCachePath() {
# Use local cache on CI to ensure deterministic build,
# use global cache in dev builds to avoid cost of downloading packages.
if ($useGlobalNuGetCache) {
- $env:NUGET_PACKAGES = Join-Path $env:UserProfile ".nuget\packages"
+ $env:NUGET_PACKAGES = Join-Path $env:UserProfile '.nuget\packages'
} else {
- $env:NUGET_PACKAGES = Join-Path $RepoRoot ".packages"
+ $env:NUGET_PACKAGES = Join-Path $RepoRoot '.packages'
}
}
@@ -486,7 +446,7 @@ function GetSdkTaskProject([string]$taskName) {
}
function InitializeNativeTools() {
- if (Get-Member -InputObject $GlobalJson -Name "native-tools") {
+ if (-Not (Test-Path variable:DisableNativeToolsetInstalls) -And (Get-Member -InputObject $GlobalJson -Name "native-tools")) {
$nativeArgs= @{}
if ($ci) {
$nativeArgs = @{
@@ -515,14 +475,14 @@ function InitializeToolset() {
}
if (-not $restore) {
- Write-PipelineTaskError "Toolset version $toolsetVersion has not been restored."
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Toolset version $toolsetVersion has not been restored."
ExitWithExitCode 1
}
$buildTool = InitializeBuildTool
- $proj = Join-Path $ToolsetDir "restore.proj"
- $bl = if ($binaryLog) { "/bl:" + (Join-Path $LogDir "ToolsetRestore.binlog") } else { "" }
+ $proj = Join-Path $ToolsetDir 'restore.proj'
+ $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'ToolsetRestore.binlog') } else { '' }
'<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' | Set-Content $proj
@@ -544,7 +504,7 @@ function ExitWithExitCode([int] $exitCode) {
}
function Stop-Processes() {
- Write-Host "Killing running build processes..."
+ Write-Host 'Killing running build processes...'
foreach ($processName in $processesToStopOnExit) {
Get-Process -Name $processName -ErrorAction SilentlyContinue | Stop-Process
}
@@ -558,9 +518,21 @@ function Stop-Processes() {
function MSBuild() {
if ($pipelinesLog) {
$buildTool = InitializeBuildTool
+
+ # Work around issues with Azure Artifacts credential provider
+ # https://github.com/dotnet/arcade/issues/3932
+ if ($ci -and $buildTool.Tool -eq 'dotnet') {
+ dotnet nuget locals http-cache -c
+
+ $env:NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS = 20
+ $env:NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS = 20
+ Write-PipelineSetVariable -Name 'NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS' -Value '20'
+ Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20'
+ }
+
$toolsetBuildProject = InitializeToolset
$path = Split-Path -parent $toolsetBuildProject
- $path = Join-Path $path (Join-Path $buildTool.Framework "Microsoft.DotNet.Arcade.Sdk.dll")
+ $path = Join-Path $path (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')
$args += "/logger:$path"
}
@@ -575,11 +547,13 @@ function MSBuild() {
function MSBuild-Core() {
if ($ci) {
if (!$binaryLog) {
- throw "Binary log must be enabled in CI build."
+ Write-PipelineTelemetryError -Category 'Build' -Message 'Binary log must be enabled in CI build.'
+ ExitWithExitCode 1
}
if ($nodeReuse) {
- throw "Node reuse must be disabled in CI build."
+ Write-PipelineTelemetryError -Category 'Build' -Message 'Node reuse must be disabled in CI build.'
+ ExitWithExitCode 1
}
}
@@ -588,7 +562,10 @@ function MSBuild-Core() {
$cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci"
if ($warnAsError) {
- $cmdArgs += " /warnaserror /p:TreatWarningsAsErrors=true"
+ $cmdArgs += ' /warnaserror /p:TreatWarningsAsErrors=true'
+ }
+ else {
+ $cmdArgs += ' /p:TreatWarningsAsErrors=false'
}
foreach ($arg in $args) {
@@ -600,7 +577,7 @@ function MSBuild-Core() {
$exitCode = Exec-Process $buildTool.Path $cmdArgs
if ($exitCode -ne 0) {
- Write-PipelineTaskError "Build failed."
+    Write-PipelineTelemetryError -Category 'Build' -Message 'Build failed.'
$buildLog = GetMSBuildBinaryLogCommandLineArgument $args
if ($buildLog -ne $null) {
@@ -615,12 +592,12 @@ function GetMSBuildBinaryLogCommandLineArgument($arguments) {
foreach ($argument in $arguments) {
if ($argument -ne $null) {
$arg = $argument.Trim()
- if ($arg.StartsWith("/bl:", "OrdinalIgnoreCase")) {
- return $arg.Substring("/bl:".Length)
+ if ($arg.StartsWith('/bl:', "OrdinalIgnoreCase")) {
+ return $arg.Substring('/bl:'.Length)
}
- if ($arg.StartsWith("/binaryLogger:", "OrdinalIgnoreCase")) {
- return $arg.Substring("/binaryLogger:".Length)
+ if ($arg.StartsWith('/binaryLogger:', 'OrdinalIgnoreCase')) {
+ return $arg.Substring('/binaryLogger:'.Length)
}
}
}
@@ -628,16 +605,16 @@ function GetMSBuildBinaryLogCommandLineArgument($arguments) {
return $null
}
-. $PSScriptRoot\LoggingCommandFunctions.ps1
+. $PSScriptRoot\pipeline-logging-functions.ps1
-$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot "..\..")
-$EngRoot = Resolve-Path (Join-Path $PSScriptRoot "..")
-$ArtifactsDir = Join-Path $RepoRoot "artifacts"
-$ToolsetDir = Join-Path $ArtifactsDir "toolset"
-$ToolsDir = Join-Path $RepoRoot ".tools"
-$LogDir = Join-Path (Join-Path $ArtifactsDir "log") $configuration
-$TempDir = Join-Path (Join-Path $ArtifactsDir "tmp") $configuration
-$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot "global.json") | ConvertFrom-Json
+$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..')
+$EngRoot = Resolve-Path (Join-Path $PSScriptRoot '..')
+$ArtifactsDir = Join-Path $RepoRoot 'artifacts'
+$ToolsetDir = Join-Path $ArtifactsDir 'toolset'
+$ToolsDir = Join-Path $RepoRoot '.tools'
+$LogDir = Join-Path (Join-Path $ArtifactsDir 'log') $configuration
+$TempDir = Join-Path (Join-Path $ArtifactsDir 'tmp') $configuration
+$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot 'global.json') | ConvertFrom-Json
# true if global.json contains a "runtimes" section
$globalJsonHasRuntimes = if ($GlobalJson.tools.PSObject.Properties.Name -Match 'runtimes') { $true } else { $false }
@@ -650,3 +627,18 @@ Write-PipelineSetVariable -Name 'Artifacts.Toolset' -Value $ToolsetDir
Write-PipelineSetVariable -Name 'Artifacts.Log' -Value $LogDir
Write-PipelineSetVariable -Name 'TEMP' -Value $TempDir
Write-PipelineSetVariable -Name 'TMP' -Value $TempDir
+
+# Import custom tools configuration, if present in the repo.
+# Note: Import in global scope so that the script set top-level variables without qualification.
+if (!$disableConfigureToolsetImport) {
+ $configureToolsetScript = Join-Path $EngRoot 'configure-toolset.ps1'
+ if (Test-Path $configureToolsetScript) {
+ . $configureToolsetScript
+ if ((Test-Path variable:failOnConfigureToolsetError) -And $failOnConfigureToolsetError) {
+ if ((Test-Path variable:LastExitCode) -And ($LastExitCode -ne 0)) {
+ Write-PipelineTelemetryError -Category 'Build' -Message 'configure-toolset.ps1 returned a non-zero exit code'
+ ExitWithExitCode $LastExitCode
+ }
+ }
+ }
+}
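+
+# For illustration only: a repo's eng\configure-toolset.ps1 (if present) is dot-sourced
+# above and can simply override the defaults defined at the top of this script, e.g.
+#   $useInstalledDotNetCli = $false
+#   $dotnetInstallScriptVersion = 'v1'
+# The variable names are real; the override values are just an example.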
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
index a8dffd3452..f58db5e25d 100644
--- a/eng/common/tools.sh
+++ b/eng/common/tools.sh
@@ -45,6 +45,10 @@ warn_as_error=${warn_as_error:-true}
# installed on the machine instead of downloading one.
use_installed_dotnet_cli=${use_installed_dotnet_cli:-true}
+# Enable repos to use a particular version of the on-line dotnet-install scripts.
+# default URL: https://dot.net/v1/dotnet-install.sh
+dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'}
+
# True to use global NuGet cache instead of restoring packages to repository-local directory.
if [[ "$ci" == true ]]; then
use_global_nuget_cache=${use_global_nuget_cache:-false}
@@ -52,78 +56,6 @@ else
use_global_nuget_cache=${use_global_nuget_cache:-true}
fi
-function EmitError {
- if [[ "$ci" != true ]]; then
- echo "$@" >&2
- return
- fi
-
- message_type="error"
- sourcepath=''
- linenumber=''
- columnnumber=''
- error_code=''
-
- while [[ $# -gt 0 ]]; do
- opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
- case "$opt" in
- -type|-t)
- message_type=$2
- shift
- ;;
- -sourcepath|-s)
- sourcepath=$2
- shift
- ;;
- -linenumber|-l)
- linenumber=$2
- shift
- ;;
- -columnnumber|-col)
- columnnumber=$2
- shift
- ;;
- -code|-c)
- error_code=$2
- shift
- ;;
- *)
- break
- ;;
- esac
-
- shift
- done
-
- message='##vso[task.logissue'
-
- message="$message type=$message_type"
-
- if [ -n "$sourcepath" ]; then
- message="$message;sourcepath=$sourcepath"
- else
- message="$message;sourcepath=${BASH_SOURCE[1]}"
- fi
-
- if [ -n "$linenumber" ]; then
- message="$message;linenumber=$linenumber"
- else
- message="$message;linenumber=${BASH_LINENO[0]}"
- fi
-
- if [ -n "$columnnumber" ]; then
- message="$message;columnnumber=$columnnumber"
- fi
-
- if [ -n "$error_code" ]; then
- message="$message;code=$error_code"
- fi
-
- message="$message]$*"
-
- echo "$message"
-}
-
# Resolve any symlinks in the given path.
function ResolvePath {
local path=$1
@@ -149,7 +81,7 @@ function ReadGlobalVersion {
local pattern="\"$key\" *: *\"(.*)\""
if [[ ! $line =~ $pattern ]]; then
- EmitError "Error: Cannot find \"$key\" in $global_json_file"
+ Write-PipelineTelemetryError -category 'Build' "Error: Cannot find \"$key\" in $global_json_file"
ExitWithExitCode 1
fi
@@ -210,7 +142,7 @@ function InitializeDotNetCli {
if [[ "$install" == true ]]; then
InstallDotNetSdk "$dotnet_root" "$dotnet_sdk_version"
else
- EmitError "Unable to find dotnet with SDK version '$dotnet_sdk_version'"
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Unable to find dotnet with SDK version '$dotnet_sdk_version'"
ExitWithExitCode 1
fi
fi
@@ -218,14 +150,10 @@ function InitializeDotNetCli {
# Add dotnet to PATH. This prevents any bare invocation of dotnet in custom
# build steps from using anything other than what we've downloaded.
- export PATH="$dotnet_root:$PATH"
+ Write-PipelinePrependPath -path "$dotnet_root"
- if [[ $ci == true ]]; then
- # Make Sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build
- echo "##vso[task.prependpath]$dotnet_root"
- echo "##vso[task.setvariable variable=DOTNET_MULTILEVEL_LOOKUP]0"
- echo "##vso[task.setvariable variable=DOTNET_SKIP_FIRST_TIME_EXPERIENCE]1"
- fi
+ Write-PipelineSetVariable -name "DOTNET_MULTILEVEL_LOOKUP" -value "0"
+ Write-PipelineSetVariable -name "DOTNET_SKIP_FIRST_TIME_EXPERIENCE" -value "1"
# return value
_InitializeDotNetCli="$dotnet_root"
@@ -263,15 +191,35 @@ function InstallDotNet {
fi
bash "$install_script" --version $version --install-dir "$root" $archArg $runtimeArg $skipNonVersionedFilesArg || {
local exit_code=$?
- EmitError "Failed to install dotnet SDK (exit code '$exit_code')."
- ExitWithExitCode $exit_code
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install dotnet SDK from public location (exit code '$exit_code')."
+
+ if [[ -n "$runtimeArg" ]]; then
+ local runtimeSourceFeed=''
+ if [[ -n "${6:-}" ]]; then
+ runtimeSourceFeed="--azure-feed $6"
+ fi
+
+ local runtimeSourceFeedKey=''
+ if [[ -n "${7:-}" ]]; then
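+        # the optional seventh argument is the feed credential, base64-encoded;
+        # decode it before passing it to dotnet-install.sh as --feed-credential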
+ decodedFeedKey=`echo $7 | base64 --decode`
+ runtimeSourceFeedKey="--feed-credential $decodedFeedKey"
+ fi
+
+ if [[ -n "$runtimeSourceFeed" || -n "$runtimeSourceFeedKey" ]]; then
+ bash "$install_script" --version $version --install-dir "$root" $archArg $runtimeArg $skipNonVersionedFilesArg $runtimeSourceFeed $runtimeSourceFeedKey || {
+ local exit_code=$?
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install dotnet SDK from custom location '$runtimeSourceFeed' (exit code '$exit_code')."
+ ExitWithExitCode $exit_code
+ }
+ fi
+ fi
}
}
function GetDotNetInstallScript {
local root=$1
local install_script="$root/dotnet-install.sh"
- local install_script_url="https://dot.net/v1/dotnet-install.sh"
+ local install_script_url="https://dot.net/$dotnetInstallScriptVersion/dotnet-install.sh"
if [[ ! -a "$install_script" ]]; then
mkdir -p "$root"
@@ -280,12 +228,19 @@ function GetDotNetInstallScript {
# Use curl if available, otherwise use wget
if command -v curl > /dev/null; then
- curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script"
- else
- wget -q -O "$install_script" "$install_script_url"
+ curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script" || {
+ local exit_code=$?
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')."
+ ExitWithExitCode $exit_code
+ }
+ else
+ wget -q -O "$install_script" "$install_script_url" || {
+ local exit_code=$?
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')."
+ ExitWithExitCode $exit_code
+ }
fi
fi
-
# return value
_GetDotNetInstallScript="$install_script"
}
@@ -317,11 +272,14 @@ function GetNuGetPackageCachePath {
}
function InitializeNativeTools() {
+ if [[ -n "${DisableNativeToolsetInstalls:-}" ]]; then
+ return
+ fi
if grep -Fq "native-tools" $global_json_file
then
local nativeArgs=""
if [[ "$ci" == true ]]; then
- nativeArgs="-InstallDirectory $tools_dir"
+ nativeArgs="--installDirectory $tools_dir"
fi
"$_script_dir/init-tools-native.sh" $nativeArgs
fi
@@ -349,7 +307,7 @@ function InitializeToolset {
fi
if [[ "$restore" != true ]]; then
- EmitError "Toolset version $toolsetVersion has not been restored."
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Toolset version $toolset_version has not been restored."
ExitWithExitCode 2
fi
@@ -366,7 +324,7 @@ function InitializeToolset {
local toolset_build_proj=`cat "$toolset_location_file"`
if [[ ! -a "$toolset_build_proj" ]]; then
- EmitError "Invalid toolset path: $toolset_build_proj"
+ Write-PipelineTelemetryError -category 'Build' "Invalid toolset path: $toolset_build_proj"
ExitWithExitCode 3
fi
@@ -393,6 +351,18 @@ function MSBuild {
if [[ "$pipelines_log" == true ]]; then
InitializeBuildTool
InitializeToolset
+
+ # Work around issues with Azure Artifacts credential provider
+ # https://github.com/dotnet/arcade/issues/3932
+ if [[ "$ci" == true ]]; then
+ "$_InitializeBuildTool" nuget locals http-cache -c
+
+ export NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS=20
+ export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20
+ Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20"
+ Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20"
+ fi
+
local toolset_dir="${_InitializeToolset%/*}"
local logger_path="$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll"
args=( "${args[@]}" "-logger:$logger_path" )
@@ -404,12 +374,12 @@ function MSBuild {
function MSBuild-Core {
if [[ "$ci" == true ]]; then
if [[ "$binary_log" != true ]]; then
- EmitError "Binary log must be enabled in CI build."
+ Write-PipelineTelemetryError -category 'Build' "Binary log must be enabled in CI build."
ExitWithExitCode 1
fi
if [[ "$node_reuse" == true ]]; then
- EmitError "Node reuse must be disabled in CI build."
+ Write-PipelineTelemetryError -category 'Build' "Node reuse must be disabled in CI build."
ExitWithExitCode 1
fi
fi
@@ -423,7 +393,7 @@ function MSBuild-Core {
"$_InitializeBuildTool" "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@" || {
local exit_code=$?
- EmitError "Build failed (exit code '$exit_code')."
+ Write-PipelineTelemetryError -category 'Build' "Build failed (exit code '$exit_code')."
ExitWithExitCode $exit_code
}
}
@@ -431,6 +401,8 @@ function MSBuild-Core {
ResolvePath "${BASH_SOURCE[0]}"
_script_dir=`dirname "$_ResolvePath"`
+. "$_script_dir/pipeline-logging-functions.sh"
+
eng_root=`cd -P "$_script_dir/.." && pwd`
repo_root=`cd -P "$_script_dir/../.." && pwd`
artifacts_dir="$repo_root/artifacts"
@@ -457,7 +429,23 @@ mkdir -p "$toolset_dir"
mkdir -p "$temp_dir"
mkdir -p "$log_dir"
-if [[ $ci == true ]]; then
- export TEMP="$temp_dir"
- export TMP="$temp_dir"
+Write-PipelineSetVariable -name "Artifacts" -value "$artifacts_dir"
+Write-PipelineSetVariable -name "Artifacts.Toolset" -value "$toolset_dir"
+Write-PipelineSetVariable -name "Artifacts.Log" -value "$log_dir"
+Write-PipelineSetVariable -name "Temp" -value "$temp_dir"
+Write-PipelineSetVariable -name "TMP" -value "$temp_dir"
+
+# Import custom tools configuration, if present in the repo.
+if [ -z "${disable_configure_toolset_import:-}" ]; then
+ configure_toolset_script="$eng_root/configure-toolset.sh"
+ if [[ -a "$configure_toolset_script" ]]; then
+ . "$configure_toolset_script"
+ fi
+fi
+
+# TODO: https://github.com/dotnet/arcade/issues/1468
+# Temporary workaround to avoid breaking change.
+# Remove once repos are updated.
+if [[ -n "${useInstalledDotNetCli:-}" ]]; then
+ use_installed_dotnet_cli="$useInstalledDotNetCli"
fi
diff --git a/eng/install-test-runtimes.ps1 b/eng/install-test-runtimes.ps1
deleted file mode 100644
index ecde1064ab..0000000000
--- a/eng/install-test-runtimes.ps1
+++ /dev/null
@@ -1,99 +0,0 @@
-[cmdletbinding()]
-param(
- [string]$DotNetDir,
- [string]$TempDir,
- [string]$BuildArch,
- [switch]$DailyTest,
- [string]$Branch="master",
- [string]$UncachedFeed="https://dotnetcli.blob.core.windows.net/dotnet"
-)
-
-Set-StrictMode -Version Latest
-$ErrorActionPreference="Stop"
-
-function Get-Latest-Version([string]$channel, [string]$runtime = "", [bool]$coherent = $false) {
-
- $VersionFileUrl = $null
- if ($runtime -eq "dotnet") {
- $VersionFileUrl = "$UncachedFeed/Runtime/$channel/latest.version"
- }
- elseif ($runtime -eq "aspnetcore") {
- $VersionFileUrl = "$UncachedFeed/aspnetcore/Runtime/$channel/latest.version"
- }
- elseif ($runtime -eq "") {
- if ($coherent) {
- $VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.coherent.version"
- }
- else {
- $VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.version"
- }
- }
- else {
- throw "Invalid value for $runtime"
- }
-
- $VersionFile = Join-Path -Path $TempDir latest.version
-
- try {
- Invoke-WebRequest $VersionFileUrl -OutFile $VersionFile
- }
- catch {
- return ""
- }
-
- if (Test-Path $VersionFile) {
- $VersionText = cat $VersionFile
- $Data = @($VersionText.Split([char[]]@(), [StringSplitOptions]::RemoveEmptyEntries));
- return $Data[1].Trim()
- }
-
- return ""
-}
-
-$ConfigFile = Join-Path -Path $DotNetDir Debugger.Tests.Versions.txt
-
-$RuntimeVersion21="2.1.12"
-$AspNetCoreVersion21="2.1.12"
-$RuntimeVersion30="3.0.0"
-$AspNetCoreVersion30="3.0.0"
-$RuntimeVersion31="3.1.0"
-$AspNetCoreVersion31="3.1.0"
-$RuntimeVersionLatest=""
-$AspNetCoreVersionLatest=""
-
-$DailyTestText="true"
-if (!$DailyTest) {
- $DailyTestText="false"
-}
-
-# Get the latest runtime versions for master and create a config file containing it
-try {
- $RuntimeVersionLatest = Get-Latest-Version $Branch dotnet
- $AspNetCoreVersionLatest = Get-Latest-Version $Branch aspnetcore
- Write-Host "Latest $Branch runtime: $RuntimeVersionLatest aspnetcore: $AspNetCoreVersionLatest"
-}
-catch {
- Write-Host "Could not download latest $Branch runtime version"
-}
-
-# Install the other versions of .NET Core runtime we are going to test. 2.1.x, 3.0.x, 3.1.x and latest.
-. $DotNetDir\dotnet-install.ps1 -Version $RuntimeVersion21 -Architecture $BuildArch -SkipNonVersionedFiles -Runtime dotnet -InstallDir $DotNetDir
-. $DotNetDir\dotnet-install.ps1 -Version $AspNetCoreVersion21 -Architecture $BuildArch -SkipNonVersionedFiles -Runtime aspnetcore -InstallDir $DotNetDir
-
-. $DotNetDir\dotnet-install.ps1 -Version $RuntimeVersion31 -Architecture $BuildArch -SkipNonVersionedFiles -Runtime dotnet -InstallDir $DotNetDir
-. $DotNetDir\dotnet-install.ps1 -Version $AspNetCoreVersion31 -Architecture $BuildArch -SkipNonVersionedFiles -Runtime aspnetcore -InstallDir $DotNetDir
-
-. $DotNetDir\dotnet-install.ps1 -Version $RuntimeVersionLatest -Architecture $BuildArch -SkipNonVersionedFiles -Runtime dotnet -InstallDir $DotNetDir
-. $DotNetDir\dotnet-install.ps1 -Version $AspNetCoreVersionLatest -Architecture $BuildArch -SkipNonVersionedFiles -Runtime aspnetcore -InstallDir $DotNetDir
-
-'<Configuration>
-  <DailyTest>' + $DailyTestText +'</DailyTest>
-  <RuntimeVersion21>' + $RuntimeVersion21 + '</RuntimeVersion21>
-  <AspNetCoreVersion21>' + $AspNetCoreVersion21 + '</AspNetCoreVersion21>
-  <RuntimeVersion30>' + $RuntimeVersion30 + '</RuntimeVersion30>
-  <AspNetCoreVersion30>' + $AspNetCoreVersion30 + '</AspNetCoreVersion30>
-  <RuntimeVersion31>' + $RuntimeVersion31 + '</RuntimeVersion31>
-  <AspNetCoreVersion31>' + $AspNetCoreVersion31 + '</AspNetCoreVersion31>
-  <RuntimeVersionLatest>' + $RuntimeVersionLatest + '</RuntimeVersionLatest>
-  <AspNetCoreVersionLatest>' + $AspNetCoreVersionLatest + '</AspNetCoreVersionLatest>
-</Configuration>' | Set-Content $ConfigFile
diff --git a/eng/install-test-runtimes.sh b/eng/install-test-runtimes.sh
deleted file mode 100755
index 6073b47035..0000000000
--- a/eng/install-test-runtimes.sh
+++ /dev/null
@@ -1,129 +0,0 @@
-#!/usr/bin/env bash
-# Copyright (c) .NET Foundation and contributors. All rights reserved.
-# Licensed under the MIT license. See LICENSE file in the project root for full license information.
-
-dotnet_dir=
-temp_dir=
-build_arch=
-daily_test=0
-branch="master"
-uncached_feed="https://dotnetcli.blob.core.windows.net/dotnet"
-
-# args:
-# channel - $1
-# runtime - $2 (dotnet, aspnetcore, "")
-# coherent - $3
-get_latest_version_info() {
- eval $invocation
-
- local channel="$1"
- local runtime="$2"
- local coherent="$3"
-
- local version_file="$temp_dir/latest.version"
- local version_file_url=null
- local version_latest=""
-
- if [[ "$runtime" == "dotnet" ]]; then
- version_file_url="$uncached_feed/Runtime/$channel/latest.version"
- elif [[ "$runtime" == "aspnetcore" ]]; then
- version_file_url="$uncached_feed/aspnetcore/Runtime/$channel/latest.version"
- elif [ -z "$runtime" ]; then
- if [ "$coherent" = true ]; then
- version_file_url="$uncached_feed/Sdk/$channel/latest.coherent.version"
- else
- version_file_url="$uncached_feed/Sdk/$channel/latest.version"
- fi
- else
- echo "Invalid value for $runtime"
- return 1
- fi
-
- # Use curl if available, otherwise use wget
- if command -v curl > /dev/null; then
- curl "$version_file_url" -sSL --retry 10 --create-dirs -o "$version_file"
- else
- wget -q -O "$version_file" "$version_file_url"
- fi
-
- if [ -f "$version_file" ]; then
- version_latest="$(cat $version_file | tail -n 1 | tr -d "\r")"
- else
- echo "Could not download latest runtime version file"
- return 1
- fi
-
- echo "$version_latest"
- return 0
-}
-
-while [ $# -ne 0 ]; do
- name=$1
- case $name in
- --dotnet-directory)
- shift
- dotnet_dir=$1
- ;;
- --temp-directory)
- shift
- temp_dir=$1
- ;;
- --architecture)
- shift
- build_arch=$1
- ;;
- --branch)
- shift
- branch=$1
- ;;
- --daily-test)
- daily_test=1
- ;;
- *)
- args="$args $1"
- ;;
- esac
- shift
-done
-
-runtime_version_21="2.1.14"
-aspnetcore_version_21="2.1.14"
-runtime_version_30="3.0.0"
-aspnetcore_version_30="3.0.0"
-runtime_version_31="3.1.0"
-aspnetcore_version_31="3.1.0"
-runtime_version_latest=""
-aspnetcore_version_latest=""
-
-config_file="$dotnet_dir/Debugger.Tests.Versions.txt"
-
-daily_test_text="true"
-if [ $daily_test == 0 ]; then
- daily_test_text="false"
-fi
-
-runtime_version_latest="$(get_latest_version_info $branch dotnet false)"
-aspnetcore_version_latest="$(get_latest_version_info $branch aspnetcore false)"
-echo "Latest $branch runtime: $runtime_version_latest aspnetcore: $aspnetcore_version_latest"
-
-# Install the other versions of .NET Core runtime we are going to test. 2.1.x, 3.0.x, 3.1.x and latest.
-bash "$dotnet_dir/dotnet-install.sh" --version "$runtime_version_21" --architecture "$build_arch" --skip-non-versioned-files --runtime dotnet --install-dir "$dotnet_dir"
-bash "$dotnet_dir/dotnet-install.sh" --version "$aspnetcore_version_21" --architecture "$build_arch" --skip-non-versioned-files --runtime aspnetcore --install-dir "$dotnet_dir"
-
-bash "$dotnet_dir/dotnet-install.sh" --version "$runtime_version_31" --architecture "$build_arch" --skip-non-versioned-files --runtime dotnet --install-dir "$dotnet_dir"
-bash "$dotnet_dir/dotnet-install.sh" --version "$aspnetcore_version_31" --architecture "$build_arch" --skip-non-versioned-files --runtime aspnetcore --install-dir "$dotnet_dir"
-
-bash "$dotnet_dir/dotnet-install.sh" --version "$runtime_version_latest" --architecture "$build_arch" --skip-non-versioned-files --runtime dotnet --install-dir "$dotnet_dir"
-bash "$dotnet_dir/dotnet-install.sh" --version "$aspnetcore_version_latest" --architecture "$build_arch" --skip-non-versioned-files --runtime aspnetcore --install-dir "$dotnet_dir"
-
-echo "
- $daily_test_text
- $runtime_version_21
- $aspnetcore_version_21
- $runtime_version_30
- $aspnetcore_version_30
- $runtime_version_31
- $aspnetcore_version_31
- $runtime_version_latest
- $aspnetcore_version_latest
-" > $config_file
diff --git a/global.json b/global.json
index add2e1c509..43d26258cb 100644
--- a/global.json
+++ b/global.json
@@ -6,6 +6,6 @@
"dotnet": "3.0.100"
},
"msbuild-sdks": {
- "Microsoft.DotNet.Arcade.Sdk": "1.0.0-beta.19309.1"
+ "Microsoft.DotNet.Arcade.Sdk": "5.0.0-beta.19608.1"
}
}
diff --git a/src/SOS/SOS.UnitTests/ConfigFiles/Unix/Debugger.Tests.Config.txt b/src/SOS/SOS.UnitTests/ConfigFiles/Unix/Debugger.Tests.Config.txt
index 843a14276e..4b469e2f54 100644
--- a/src/SOS/SOS.UnitTests/ConfigFiles/Unix/Debugger.Tests.Config.txt
+++ b/src/SOS/SOS.UnitTests/ConfigFiles/Unix/Debugger.Tests.Config.txt
@@ -1,7 +1,7 @@
netcoreapp3.0
$(RuntimeVersion31)
@@ -65,7 +67,7 @@
$(RuntimeVersion30)
$(RuntimeVersionLatest)
-
- -->
-
- $(RepoRootDir)/.dotnet/dotnet
+ $(DotNetRoot)/dotnet
--fx-version $(RuntimeFrameworkVersion)
- $(RepoRootDir)/.dotnet/shared/Microsoft.NETCore.App/$(RuntimeFrameworkVersion)
+ $(DotNetRoot)/shared/Microsoft.NETCore.App/$(RuntimeFrameworkVersion)
$(ScriptRootDir)/lldbhelper.py
diff --git a/src/SOS/SOS.UnitTests/ConfigFiles/Windows/Debugger.Tests.Config.txt b/src/SOS/SOS.UnitTests/ConfigFiles/Windows/Debugger.Tests.Config.txt
index 6540ed2739..01cf29c5c6 100644
--- a/src/SOS/SOS.UnitTests/ConfigFiles/Windows/Debugger.Tests.Config.txt
+++ b/src/SOS/SOS.UnitTests/ConfigFiles/Windows/Debugger.Tests.Config.txt
@@ -1,7 +1,7 @@
netcoreapp3.0
$(RuntimeVersion31)
@@ -73,7 +75,7 @@
$(RuntimeVersion30)
$(RuntimeVersionLatest)
-
- -->
-
- $(RepoRootDir)\.dotnet\dotnet.exe
+ $(DotNetRoot)\dotnet.exe
--fx-version $(RuntimeFrameworkVersion)
- $(RepoRootDir)\.dotnet\shared\Microsoft.NETCore.App\$(RuntimeFrameworkVersion)
+ $(DotNetRoot)\shared\Microsoft.NETCore.App\$(RuntimeFrameworkVersion)
$(InstallDir)\sos.dll
$(RootBinDir)/bin/dotnet-dump/$(TargetConfiguration)/netcoreapp2.1/publish/dotnet-dump.dll
diff --git a/src/SOS/SOS.UnitTests/SOS.UnitTests.csproj b/src/SOS/SOS.UnitTests/SOS.UnitTests.csproj
index 828eaab8b7..5443f467eb 100644
--- a/src/SOS/SOS.UnitTests/SOS.UnitTests.csproj
+++ b/src/SOS/SOS.UnitTests/SOS.UnitTests.csproj
@@ -1,5 +1,8 @@
+
+
+
netcoreapp2.0
false
@@ -36,4 +39,5 @@
+
diff --git a/src/tests/Microsoft.Diagnostics.NETCore.Client/CommonHelper.cs b/src/tests/Microsoft.Diagnostics.NETCore.Client/CommonHelper.cs
index d1109110fb..86f0ec197d 100644
--- a/src/tests/Microsoft.Diagnostics.NETCore.Client/CommonHelper.cs
+++ b/src/tests/Microsoft.Diagnostics.NETCore.Client/CommonHelper.cs
@@ -2,19 +2,18 @@
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
-using System;
using System.IO;
-using System.Reflection;
using System.Runtime.InteropServices;
-using Microsoft.Diagnostics.TestHelpers;
-
namespace Microsoft.Diagnostics.NETCore.Client
{
public class CommonHelper
{
public static string HostExe = RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ?
- "..\\..\\..\\..\\..\\.dotnet\\dotnet.exe" : "../../../../../.dotnet/dotnet";
+ (RuntimeInformation.ProcessArchitecture == Architecture.X86 ?
+ "..\\..\\..\\..\\..\\.dotnet\\x86\\dotnet.exe" :
+ "..\\..\\..\\..\\..\\.dotnet\\dotnet.exe") :
+ "../../../../../.dotnet/dotnet";
public static string GetTraceePath()
{
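The CommonHelper change above points HostExe at the x86 copy of the repo-local dotnet host when the test process itself runs as 32-bit x86, so the tracee's bitness matches the test run. A hypothetical usage sketch of how a test might combine HostExe with GetTraceePath() to start a tracee (the Process wiring here is illustrative; it is not the repo's actual harness code):

    // Launch the tracee under the repo-local dotnet host resolved by CommonHelper.
    using System.Diagnostics;
    using Microsoft.Diagnostics.NETCore.Client;

    var startInfo = new ProcessStartInfo
    {
        FileName = CommonHelper.HostExe,          // repo-local dotnet(.exe); x86 copy on 32-bit test runs
        Arguments = CommonHelper.GetTraceePath(), // path to the tracee assembly
        UseShellExecute = false,
        RedirectStandardOutput = true
    };
    using var tracee = Process.Start(startInfo);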
diff --git a/src/tests/Microsoft.Diagnostics.NETCore.Client/GetPublishedProcessesTests.cs b/src/tests/Microsoft.Diagnostics.NETCore.Client/GetPublishedProcessesTests.cs
index 7ecc0e8281..bb31623b62 100644
--- a/src/tests/Microsoft.Diagnostics.NETCore.Client/GetPublishedProcessesTests.cs
+++ b/src/tests/Microsoft.Diagnostics.NETCore.Client/GetPublishedProcessesTests.cs
@@ -4,19 +4,12 @@
using System;
using System.Collections.Generic;
-using System.Diagnostics;
-using System.IO;
-using System.Threading;
-using System.Runtime.InteropServices;
using Xunit;
using Xunit.Abstractions;
-using Microsoft.Diagnostics.TestHelpers;
-using Microsoft.Diagnostics.NETCore.Client;
-
namespace Microsoft.Diagnostics.NETCore.Client
{
-
+
/// <summary>
/// Suite of tests that test top-level commands
/// </summary>