diff --git a/Directory.Build.props b/Directory.Build.props
index 1683a4c99e3..3da3ed640c0 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -6,7 +6,7 @@
$(CopyrightNetFoundation)
MIT
-
+
Microsoft® Build Tools®
Debug;Release;Debug-MONO;Release-MONO;MachineIndependent
@@ -16,19 +16,6 @@
net472
-
- windows
- linux
- osx
-
- bsd
-
- false
- true
-
- $(OS)
-
-
$(RepoRoot).dotnet\
@@ -49,9 +36,10 @@
CS1701 and CS1702 are by default ignored by Microsoft.NET.Sdk, but if you define the NoWarn property in Directory.Build.props,
you don't get those defaults.
NU5125: Arcade uses licenseUrl when doing pack, which now causes NU5125 warning. This disables that warning until arcade can switch over.
+ SYSLIB0011: Removing binary formatter will happen as part of a larger .NET-wide effort.
-->
- $(NoWarn);NU1603;NU5105;NU5125;1701;1702
+ $(NoWarn);NU1603;NU5105;NU5125;1701;1702;SYSLIB0011
diff --git a/eng/Packages.props b/eng/Packages.props
index ebd0d0e551f..07d71a3583c 100644
--- a/eng/Packages.props
+++ b/eng/Packages.props
@@ -28,6 +28,7 @@
+
@@ -46,6 +47,10 @@
+
+
+
+
diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
index e1d14d55a01..9b1550fa2f0 100644
--- a/eng/Version.Details.xml
+++ b/eng/Version.Details.xml
@@ -1,17 +1,17 @@
-
+
https://github.com/dotnet/arcade
- 7ee8c2b620e66b3762d7a5a688dee8238770c86a
+ 6813f5aa511a7a4498fa217a54219b5704a01f83
https://github.com/NuGet/NuGet.Client
719f110bf31709b7f8bdf1ab74024450936d866c
-
+
https://github.com/dotnet/roslyn
-
+ e9fd4dc7d74932c0d4b042251bc5a88bb5b3c437
diff --git a/eng/Versions.props b/eng/Versions.props
index 8e618b92c93..b7c63418c88 100644
--- a/eng/Versions.props
+++ b/eng/Versions.props
@@ -23,8 +23,10 @@
- 3.1.100
- 3.3.1-beta3-final
+
+ 5.0.102
+ 3.9.0-2.20574.26
5.9.0-preview.3.7016
diff --git a/eng/cibuild_bootstrapped_msbuild.ps1 b/eng/cibuild_bootstrapped_msbuild.ps1
index ca978944bd6..8fb7f590527 100644
--- a/eng/cibuild_bootstrapped_msbuild.ps1
+++ b/eng/cibuild_bootstrapped_msbuild.ps1
@@ -95,7 +95,7 @@ try {
else
{
$buildToolPath = $dotnetExePath
- $buildToolCommand = Join-Path $bootstrapRoot "netcoreapp2.1\MSBuild\MSBuild.dll"
+ $buildToolCommand = Join-Path $bootstrapRoot "net5.0\MSBuild\MSBuild.dll"
$buildToolFramework = "netcoreapp2.1"
}
diff --git a/eng/cibuild_bootstrapped_msbuild.sh b/eng/cibuild_bootstrapped_msbuild.sh
index 6688317ada3..23987f994d9 100755
--- a/eng/cibuild_bootstrapped_msbuild.sh
+++ b/eng/cibuild_bootstrapped_msbuild.sh
@@ -62,7 +62,7 @@ bootstrapRoot="$Stage1Dir/bin/bootstrap"
if [ $host_type = "core" ]
then
_InitializeBuildTool="$_InitializeDotNetCli/dotnet"
- _InitializeBuildToolCommand="$bootstrapRoot/netcoreapp2.1/MSBuild/MSBuild.dll"
+ _InitializeBuildToolCommand="$bootstrapRoot/net5.0/MSBuild/MSBuild.dll"
_InitializeBuildToolFramework="netcoreapp2.1"
elif [ $host_type = "mono" ]
then
diff --git a/eng/common/CheckSymbols.ps1 b/eng/common/CheckSymbols.ps1
deleted file mode 100644
index b8d84607b89..00000000000
--- a/eng/common/CheckSymbols.ps1
+++ /dev/null
@@ -1,158 +0,0 @@
-param(
- [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored
- [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
- [Parameter(Mandatory=$true)][string] $SymbolToolPath # Full path to directory where dotnet symbol-tool was installed
-)
-
-Add-Type -AssemblyName System.IO.Compression.FileSystem
-
-function FirstMatchingSymbolDescriptionOrDefault {
- param(
- [string] $FullPath, # Full path to the module that has to be checked
- [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
- [string] $SymbolsPath
- )
-
- $FileName = [System.IO.Path]::GetFileName($FullPath)
- $Extension = [System.IO.Path]::GetExtension($FullPath)
-
- # Those below are potential symbol files that the `dotnet symbol` might
- # return. Which one will be returned depend on the type of file we are
- # checking and which type of file was uploaded.
-
- # The file itself is returned
- $SymbolPath = $SymbolsPath + "\" + $FileName
-
- # PDB file for the module
- $PdbPath = $SymbolPath.Replace($Extension, ".pdb")
-
- # PDB file for R2R module (created by crossgen)
- $NGenPdb = $SymbolPath.Replace($Extension, ".ni.pdb")
-
- # DBG file for a .so library
- $SODbg = $SymbolPath.Replace($Extension, ".so.dbg")
-
- # DWARF file for a .dylib
- $DylibDwarf = $SymbolPath.Replace($Extension, ".dylib.dwarf")
-
- .\dotnet-symbol.exe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath | Out-Null
-
- if (Test-Path $PdbPath) {
- return "PDB"
- }
- elseif (Test-Path $NGenPdb) {
- return "NGen PDB"
- }
- elseif (Test-Path $SODbg) {
- return "DBG for SO"
- }
- elseif (Test-Path $DylibDwarf) {
- return "Dwarf for Dylib"
- }
- elseif (Test-Path $SymbolPath) {
- return "Module"
- }
- else {
- return $null
- }
-}
-
-function CountMissingSymbols {
- param(
- [string] $PackagePath # Path to a NuGet package
- )
-
- # Ensure input file exist
- if (!(Test-Path $PackagePath)) {
- throw "Input file does not exist: $PackagePath"
- }
-
- # Extensions for which we'll look for symbols
- $RelevantExtensions = @(".dll", ".exe", ".so", ".dylib")
-
- # How many files are missing symbol information
- $MissingSymbols = 0
-
- $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
- $PackageGuid = New-Guid
- $ExtractPath = Join-Path -Path $ExtractPath -ChildPath $PackageGuid
- $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath "Symbols"
-
- [System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath)
-
- # Makes easier to reference `symbol tool`
- Push-Location $SymbolToolPath
-
- Get-ChildItem -Recurse $ExtractPath |
- Where-Object {$RelevantExtensions -contains $_.Extension} |
- ForEach-Object {
- if ($_.FullName -Match "\\ref\\") {
- Write-Host "`t Ignoring reference assembly file" $_.FullName
- return
- }
-
- $SymbolsOnMSDL = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--microsoft-symbol-server" $SymbolsPath
- $SymbolsOnSymWeb = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--internal-server" $SymbolsPath
-
- Write-Host -NoNewLine "`t Checking file" $_.FullName "... "
-
- if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) {
- Write-Host "Symbols found on MSDL (" $SymbolsOnMSDL ") and SymWeb (" $SymbolsOnSymWeb ")"
- }
- else {
- $MissingSymbols++
-
- if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) {
- Write-Host "No symbols found on MSDL or SymWeb!"
- }
- else {
- if ($SymbolsOnMSDL -eq $null) {
- Write-Host "No symbols found on MSDL!"
- }
- else {
- Write-Host "No symbols found on SymWeb!"
- }
- }
- }
- }
-
- Pop-Location
-
- return $MissingSymbols
-}
-
-function CheckSymbolsAvailable {
- if (Test-Path $ExtractPath) {
- Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
- }
-
- Get-ChildItem "$InputPath\*.nupkg" |
- ForEach-Object {
- $FileName = $_.Name
-
- # These packages from Arcade-Services include some native libraries that
- # our current symbol uploader can't handle. Below is a workaround until
- # we get issue: https://github.com/dotnet/arcade/issues/2457 sorted.
- if ($FileName -Match "Microsoft\.DotNet\.Darc\.") {
- Write-Host "Ignoring Arcade-services file: $FileName"
- Write-Host
- return
- }
- elseif ($FileName -Match "Microsoft\.DotNet\.Maestro\.Tasks\.") {
- Write-Host "Ignoring Arcade-services file: $FileName"
- Write-Host
- return
- }
-
- Write-Host "Validating $FileName "
- $Status = CountMissingSymbols "$InputPath\$FileName"
-
- if ($Status -ne 0) {
- Write-Error "Missing symbols for $Status modules in the package $FileName"
- }
-
- Write-Host
- }
-}
-
-CheckSymbolsAvailable
diff --git a/eng/common/PublishToPackageFeed.proj b/eng/common/PublishToPackageFeed.proj
deleted file mode 100644
index a1b1333723e..00000000000
--- a/eng/common/PublishToPackageFeed.proj
+++ /dev/null
@@ -1,83 +0,0 @@
-
-
-
-
-
- netcoreapp2.1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- https://dotnetfeed.blob.core.windows.net/dotnet-core/index.json
- https://dotnetfeed.blob.core.windows.net/arcade-validation/index.json
- https://dotnetfeed.blob.core.windows.net/aspnet-aspnetcore/index.json
- https://dotnetfeed.blob.core.windows.net/aspnet-aspnetcore-tooling/index.json
- https://dotnetfeed.blob.core.windows.net/aspnet-entityframeworkcore/index.json
- https://dotnetfeed.blob.core.windows.net/aspnet-extensions/index.json
- https://dotnetfeed.blob.core.windows.net/dotnet-coreclr/index.json
- https://dotnetfeed.blob.core.windows.net/dotnet-sdk/index.json
- https://dotnetfeed.blob.core.windows.net/dotnet-tools-internal/index.json
- https://dotnetfeed.blob.core.windows.net/dotnet-toolset/index.json
- https://dotnetfeed.blob.core.windows.net/dotnet-windowsdesktop/index.json
- https://dotnetfeed.blob.core.windows.net/nuget-nugetclient/index.json
- https://dotnetfeed.blob.core.windows.net/aspnet-entityframework6/index.json
- https://dotnetfeed.blob.core.windows.net/aspnet-blazor/index.json
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/eng/common/PublishToSymbolServers.proj b/eng/common/PublishToSymbolServers.proj
deleted file mode 100644
index 5d55e312b01..00000000000
--- a/eng/common/PublishToSymbolServers.proj
+++ /dev/null
@@ -1,82 +0,0 @@
-
-
-
-
-
- netcoreapp2.1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 3650
- true
- false
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1
index 83218ad7e72..a0b5fc37f43 100644
--- a/eng/common/SetupNugetSources.ps1
+++ b/eng/common/SetupNugetSources.ps1
@@ -146,16 +146,16 @@ $userName = "dn-bot"
# Insert credential nodes for Maestro's private feeds
InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -Password $Password
-$dotnet3Source = $sources.SelectSingleNode("add[@key='dotnet3']")
-if ($dotnet3Source -ne $null) {
- AddPackageSource -Sources $sources -SourceName "dotnet3-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
- AddPackageSource -Sources $sources -SourceName "dotnet3-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
-}
-
$dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']")
if ($dotnet31Source -ne $null) {
AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
}
-$doc.Save($filename)
\ No newline at end of file
+$dotnet5Source = $sources.SelectSingleNode("add[@key='dotnet5']")
+if ($dotnet5Source -ne $null) {
+ AddPackageSource -Sources $sources -SourceName "dotnet5-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
+ AddPackageSource -Sources $sources -SourceName "dotnet5-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
+}
+
+$doc.Save($filename)
diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh
index 751863d5006..2734601c13c 100644
--- a/eng/common/SetupNugetSources.sh
+++ b/eng/common/SetupNugetSources.sh
@@ -81,53 +81,52 @@ fi
PackageSources=()
-# Ensure dotnet3-internal and dotnet3-internal-transport are in the packageSources if the public dotnet3 feeds are present
-grep -i "" $ConfigFile
+ grep -i ""
+ PackageSourceTemplate="${TB}"
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
fi
- PackageSources+=('dotnet3-internal')
+ PackageSources+=('dotnet3.1-internal')
- grep -i "" $ConfigFile
if [ "$?" != "0" ]; then
- echo "Adding dotnet3-internal-transport to the packageSources."
+ echo "Adding dotnet3.1-internal-transport to the packageSources."
PackageSourcesNodeFooter=""
- PackageSourceTemplate="${TB}"
+ PackageSourceTemplate="${TB}"
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
fi
- PackageSources+=('dotnet3-internal-transport')
+ PackageSources+=('dotnet3.1-internal-transport')
fi
-# Ensure dotnet3.1-internal and dotnet3.1-internal-transport are in the packageSources if the public dotnet3.1 feeds are present
-grep -i ""
+ PackageSourceTemplate="${TB}"
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
fi
- PackageSources+=('dotnet3.1-internal')
+ PackageSources+=('dotnet5-internal')
- grep -i "" $ConfigFile
+ grep -i "" $ConfigFile
if [ "$?" != "0" ]; then
- echo "Adding dotnet3.1-internal-transport to the packageSources."
+ echo "Adding dotnet5-internal-transport to the packageSources."
PackageSourcesNodeFooter=""
- PackageSourceTemplate="${TB}"
+ PackageSourceTemplate="${TB}"
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
fi
- PackageSources+=('dotnet3.1-internal-transport')
+ PackageSources+=('dotnet5-internal-transport')
fi
# I want things split line by line
@@ -165,4 +164,4 @@ if [ "$?" == "0" ]; then
echo "Neutralized disablePackageSources entry for '$DisabledSourceName'"
fi
done
-fi
\ No newline at end of file
+fi
diff --git a/eng/common/SigningValidation.proj b/eng/common/SigningValidation.proj
deleted file mode 100644
index 3d0ac80af3f..00000000000
--- a/eng/common/SigningValidation.proj
+++ /dev/null
@@ -1,83 +0,0 @@
-
-
-
-
-
- netcoreapp2.1
-
-
-
-
-
-
-
- $(NuGetPackageRoot)Microsoft.DotNet.SignCheck\$(SignCheckVersion)\tools\Microsoft.DotNet.SignCheck.exe
-
- $(PackageBasePath)
- signcheck.log
- signcheck.errors.log
- signcheck.exclusions.txt
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/eng/common/SourceLinkValidation.ps1 b/eng/common/SourceLinkValidation.ps1
deleted file mode 100644
index cb2d28cb99e..00000000000
--- a/eng/common/SourceLinkValidation.ps1
+++ /dev/null
@@ -1,184 +0,0 @@
-param(
- [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where Symbols.NuGet packages to be checked are stored
- [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
- [Parameter(Mandatory=$true)][string] $SourceLinkToolPath, # Full path to directory where dotnet SourceLink CLI was installed
- [Parameter(Mandatory=$true)][string] $GHRepoName, # GitHub name of the repo including the Org. E.g., dotnet/arcade
- [Parameter(Mandatory=$true)][string] $GHCommit # GitHub commit SHA used to build the packages
-)
-
-# Cache/HashMap (File -> Exist flag) used to consult whether a file exist
-# in the repository at a specific commit point. This is populated by inserting
-# all files present in the repo at a specific commit point.
-$global:RepoFiles = @{}
-
-$ValidatePackage = {
- param(
- [string] $PackagePath # Full path to a Symbols.NuGet package
- )
-
- # Ensure input file exist
- if (!(Test-Path $PackagePath)) {
- throw "Input file does not exist: $PackagePath"
- }
-
- # Extensions for which we'll look for SourceLink information
- # For now we'll only care about Portable & Embedded PDBs
- $RelevantExtensions = @(".dll", ".exe", ".pdb")
-
- Write-Host -NoNewLine "Validating" ([System.IO.Path]::GetFileName($PackagePath)) "... "
-
- $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
- $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
- $FailedFiles = 0
-
- Add-Type -AssemblyName System.IO.Compression.FileSystem
-
- [System.IO.Directory]::CreateDirectory($ExtractPath);
-
- $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
-
- $zip.Entries |
- Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
- ForEach-Object {
- $FileName = $_.FullName
- $Extension = [System.IO.Path]::GetExtension($_.Name)
- $FakeName = -Join((New-Guid), $Extension)
- $TargetFile = Join-Path -Path $ExtractPath -ChildPath $FakeName
-
- # We ignore resource DLLs
- if ($FileName.EndsWith(".resources.dll")) {
- return
- }
-
- [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
-
- $ValidateFile = {
- param(
- [string] $FullPath, # Full path to the module that has to be checked
- [string] $RealPath,
- [ref] $FailedFiles
- )
-
- # Makes easier to reference `sourcelink cli`
- Push-Location $using:SourceLinkToolPath
-
- $SourceLinkInfos = .\sourcelink.exe print-urls $FullPath | Out-String
-
- if ($LASTEXITCODE -eq 0 -and -not ([string]::IsNullOrEmpty($SourceLinkInfos))) {
- $NumFailedLinks = 0
-
- # We only care about Http addresses
- $Matches = (Select-String '(http[s]?)(:\/\/)([^\s,]+)' -Input $SourceLinkInfos -AllMatches).Matches
-
- if ($Matches.Count -ne 0) {
- $Matches.Value |
- ForEach-Object {
- $Link = $_
- $CommitUrl = -Join("https://raw.githubusercontent.com/", $using:GHRepoName, "/", $using:GHCommit, "/")
- $FilePath = $Link.Replace($CommitUrl, "")
- $Status = 200
- $Cache = $using:RepoFiles
-
- if ( !($Cache.ContainsKey($FilePath)) ) {
- try {
- $Uri = $Link -as [System.URI]
-
- # Only GitHub links are valid
- if ($Uri.AbsoluteURI -ne $null -and $Uri.Host -match "github") {
- $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
- }
- else {
- $Status = 0
- }
- }
- catch {
- $Status = 0
- }
- }
-
- if ($Status -ne 200) {
- if ($NumFailedLinks -eq 0) {
- if ($FailedFiles.Value -eq 0) {
- Write-Host
- }
-
- Write-Host "`tFile $RealPath has broken links:"
- }
-
- Write-Host "`t`tFailed to retrieve $Link"
-
- $NumFailedLinks++
- }
- }
- }
-
- if ($NumFailedLinks -ne 0) {
- $FailedFiles.value++
- $global:LASTEXITCODE = 1
- }
- }
-
- Pop-Location
- }
-
- &$ValidateFile $TargetFile $FileName ([ref]$FailedFiles)
- }
-
- $zip.Dispose()
-
- if ($FailedFiles -eq 0) {
- Write-Host "Passed."
- }
-}
-
-function ValidateSourceLinkLinks {
- if (!($GHRepoName -Match "^[^\s\/]+/[^\s\/]+$")) {
- Write-Host "GHRepoName should be in the format /"
- $global:LASTEXITCODE = 1
- return
- }
-
- if (!($GHCommit -Match "^[0-9a-fA-F]{40}$")) {
- Write-Host "GHCommit should be a 40 chars hexadecimal string"
- $global:LASTEXITCODE = 1
- return
- }
-
- $RepoTreeURL = -Join("https://api.github.com/repos/", $GHRepoName, "/git/trees/", $GHCommit, "?recursive=1")
- $CodeExtensions = @(".cs", ".vb", ".fs", ".fsi", ".fsx", ".fsscript")
-
- try {
- # Retrieve the list of files in the repo at that particular commit point and store them in the RepoFiles hash
- $Data = Invoke-WebRequest $RepoTreeURL | ConvertFrom-Json | Select-Object -ExpandProperty tree
-
- foreach ($file in $Data) {
- $Extension = [System.IO.Path]::GetExtension($file.path)
-
- if ($CodeExtensions.Contains($Extension)) {
- $RepoFiles[$file.path] = 1
- }
- }
- }
- catch {
- Write-Host "Problems downloading the list of files from the repo. Url used: $RepoTreeURL"
- $global:LASTEXITCODE = 1
- return
- }
-
- if (Test-Path $ExtractPath) {
- Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
- }
-
- # Process each NuGet package in parallel
- $Jobs = @()
- Get-ChildItem "$InputPath\*.symbols.nupkg" |
- ForEach-Object {
- $Jobs += Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName
- }
-
- foreach ($Job in $Jobs) {
- Wait-Job -Id $Job.Id | Receive-Job
- }
-}
-
-Measure-Command { ValidateSourceLinkLinks }
diff --git a/eng/common/build.ps1 b/eng/common/build.ps1
index e001ccb481c..1fd7f686fae 100644
--- a/eng/common/build.ps1
+++ b/eng/common/build.ps1
@@ -7,6 +7,7 @@ Param(
[string] $msbuildEngine = $null,
[bool] $warnAsError = $true,
[bool] $nodeReuse = $true,
+ [bool] $useDefaultDotnetInstall = $false,
[switch][Alias('r')]$restore,
[switch] $deployDeps,
[switch][Alias('b')]$build,
@@ -18,56 +19,68 @@ Param(
[switch] $sign,
[switch] $pack,
[switch] $publish,
+ [switch] $clean,
[switch][Alias('bl')]$binaryLog,
+ [switch][Alias('nobl')]$excludeCIBinarylog,
[switch] $ci,
[switch] $prepareMachine,
+ [string] $runtimeSourceFeed = '',
+ [string] $runtimeSourceFeedKey = '',
[switch] $help,
[Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
)
-. $PSScriptRoot\tools.ps1
-
+# Unset 'Platform' environment variable to avoid unwanted collision in InstallDotNetCore.targets file
+# some computers have this env var defined (e.g. some HP machines)
+if($env:Platform) {
+ $env:Platform=""
+}
function Print-Usage() {
- Write-Host "Common settings:"
- Write-Host " -configuration Build configuration: 'Debug' or 'Release' (short: -c)"
- Write-Host " -platform Platform configuration: 'x86', 'x64' or any valid Platform value to pass to msbuild"
- Write-Host " -verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)"
- Write-Host " -binaryLog Output binary log (short: -bl)"
- Write-Host " -help Print help and exit"
- Write-Host ""
-
- Write-Host "Actions:"
- Write-Host " -restore Restore dependencies (short: -r)"
- Write-Host " -build Build solution (short: -b)"
- Write-Host " -rebuild Rebuild solution"
- Write-Host " -deploy Deploy built VSIXes"
- Write-Host " -deployDeps Deploy dependencies (e.g. VSIXes for integration tests)"
- Write-Host " -test Run all unit tests in the solution (short: -t)"
- Write-Host " -integrationTest Run all integration tests in the solution"
- Write-Host " -performanceTest Run all performance tests in the solution"
- Write-Host " -pack Package build outputs into NuGet packages and Willow components"
- Write-Host " -sign Sign build outputs"
- Write-Host " -publish Publish artifacts (e.g. symbols)"
- Write-Host ""
-
- Write-Host "Advanced settings:"
- Write-Host " -projects Semi-colon delimited list of sln/proj's to build. Globbing is supported (*.sln)"
- Write-Host " -ci Set when running on CI server"
- Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build"
- Write-Host " -warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
- Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
- Write-Host ""
-
- Write-Host "Command line arguments not listed above are passed thru to msbuild."
- Write-Host "The above arguments can be shortened as much as to be unambiguous (e.g. -co for configuration, -t for test, etc.)."
+ Write-Host "Common settings:"
+ Write-Host " -configuration Build configuration: 'Debug' or 'Release' (short: -c)"
+ Write-Host " -platform Platform configuration: 'x86', 'x64' or any valid Platform value to pass to msbuild"
+ Write-Host " -verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)"
+ Write-Host " -binaryLog Output binary log (short: -bl)"
+ Write-Host " -help Print help and exit"
+ Write-Host ""
+
+ Write-Host "Actions:"
+ Write-Host " -restore Restore dependencies (short: -r)"
+ Write-Host " -build Build solution (short: -b)"
+ Write-Host " -rebuild Rebuild solution"
+ Write-Host " -deploy Deploy built VSIXes"
+ Write-Host " -deployDeps Deploy dependencies (e.g. VSIXes for integration tests)"
+ Write-Host " -test Run all unit tests in the solution (short: -t)"
+ Write-Host " -integrationTest Run all integration tests in the solution"
+ Write-Host " -performanceTest Run all performance tests in the solution"
+ Write-Host " -pack Package build outputs into NuGet packages and Willow components"
+ Write-Host " -sign Sign build outputs"
+ Write-Host " -publish Publish artifacts (e.g. symbols)"
+ Write-Host " -clean Clean the solution"
+ Write-Host ""
+
+ Write-Host "Advanced settings:"
+ Write-Host " -projects Semi-colon delimited list of sln/proj's to build. Globbing is supported (*.sln)"
+ Write-Host " -ci Set when running on CI server"
+ Write-Host " -excludeCIBinarylog Don't output binary log (short: -nobl)"
+ Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build"
+ Write-Host " -warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
+ Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
+ Write-Host " -useDefaultDotnetInstall Use dotnet-install.* scripts from public location as opposed to from eng common folder"
+ Write-Host ""
+
+ Write-Host "Command line arguments not listed above are passed thru to msbuild."
+ Write-Host "The above arguments can be shortened as much as to be unambiguous (e.g. -co for configuration, -t for test, etc.)."
}
+. $PSScriptRoot\tools.ps1
+
function InitializeCustomToolset {
if (-not $restore) {
return
}
- $script = Join-Path $EngRoot "restore-toolset.ps1"
+ $script = Join-Path $EngRoot 'restore-toolset.ps1'
if (Test-Path $script) {
. $script
@@ -78,8 +91,8 @@ function Build {
$toolsetBuildProj = InitializeToolset
InitializeCustomToolset
- $bl = if ($binaryLog) { "/bl:" + (Join-Path $LogDir "Build.binlog") } else { "" }
- $platformArg = if ($platform) { "/p:Platform=$platform" } else { "" }
+ $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'Build.binlog') } else { '' }
+ $platformArg = if ($platform) { "/p:Platform=$platform" } else { '' }
if ($projects) {
# Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons.
@@ -113,24 +126,27 @@ function Build {
}
try {
- if ($help -or (($null -ne $properties) -and ($properties.Contains("/help") -or $properties.Contains("/?")))) {
+ if ($clean) {
+ if (Test-Path $ArtifactsDir) {
+ Remove-Item -Recurse -Force $ArtifactsDir
+ Write-Host 'Artifacts directory deleted.'
+ }
+ exit 0
+ }
+
+ if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) {
Print-Usage
exit 0
}
if ($ci) {
- $binaryLog = $true
+ if (-not $excludeCIBinarylog) {
+ $binaryLog = $true
+ }
$nodeReuse = $false
}
- # Import custom tools configuration, if present in the repo.
- # Note: Import in global scope so that the script set top-level variables without qualification.
- $configureToolsetScript = Join-Path $EngRoot "configure-toolset.ps1"
- if (Test-Path $configureToolsetScript) {
- . $configureToolsetScript
- }
-
- if (($restore) -and ($null -eq $env:DisableNativeToolsetInstalls)) {
+ if ($restore) {
InitializeNativeTools
}
@@ -138,7 +154,7 @@ try {
}
catch {
Write-Host $_.ScriptStackTrace
- Write-PipelineTelemetryError -Category "InitializeToolset" -Message $_
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
ExitWithExitCode 1
}
diff --git a/eng/common/build.sh b/eng/common/build.sh
index 6236fc4d38c..19849adbee3 100755
--- a/eng/common/build.sh
+++ b/eng/common/build.sh
@@ -26,14 +26,18 @@ usage()
echo " --pack Package build outputs into NuGet packages and Willow components"
echo " --sign Sign build outputs"
echo " --publish Publish artifacts (e.g. symbols)"
+ echo " --clean Clean the solution"
echo ""
echo "Advanced settings:"
echo " --projects Project or solution file(s) to build"
echo " --ci Set when running on CI server"
+ echo " --excludeCIBinarylog Don't output binary log (short: -nobl)"
echo " --prepareMachine Prepare machine for CI run, clean up processes after build"
echo " --nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')"
echo " --warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
+ echo " --useDefaultDotnetInstall Use dotnet-install.* scripts from public location as opposed to from eng common folder"
+
echo ""
echo "Command line arguments not listed above are passed thru to msbuild."
echo "Arguments can also be passed in with a single hyphen."
@@ -62,19 +66,23 @@ publish=false
sign=false
public=false
ci=false
+clean=false
warn_as_error=true
node_reuse=true
binary_log=false
+exclude_ci_binary_log=false
pipelines_log=false
projects=''
configuration='Debug'
prepare_machine=false
verbosity='minimal'
+runtime_source_feed=''
+runtime_source_feed_key=''
+use_default_dotnet_install=false
properties=''
-
while [[ $# > 0 ]]; do
opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
case "$opt" in
@@ -82,6 +90,9 @@ while [[ $# > 0 ]]; do
usage
exit 0
;;
+ -clean)
+ clean=true
+ ;;
-configuration|-c)
configuration=$2
shift
@@ -93,6 +104,9 @@ while [[ $# > 0 ]]; do
-binarylog|-bl)
binary_log=true
;;
+ -excludeCIBinarylog|-nobl)
+ exclude_ci_binary_log=true
+ ;;
-pipelineslog|-pl)
pipelines_log=true
;;
@@ -141,6 +155,18 @@ while [[ $# > 0 ]]; do
node_reuse=$2
shift
;;
+ -runtimesourcefeed)
+ runtime_source_feed=$2
+ shift
+ ;;
+ -runtimesourcefeedkey)
+ runtime_source_feed_key=$2
+ shift
+ ;;
+ -usedefaultdotnetinstall)
+ use_default_dotnet_install=$2
+ shift
+ ;;
*)
properties="$properties $1"
;;
@@ -151,8 +177,10 @@ done
if [[ "$ci" == true ]]; then
pipelines_log=true
- binary_log=true
node_reuse=false
+ if [[ "$exclude_ci_binary_log" == false ]]; then
+ binary_log=true
+ fi
fi
. "$scriptroot/tools.sh"
@@ -196,20 +224,15 @@ function Build {
ExitWithExitCode 0
}
-# Import custom tools configuration, if present in the repo.
-configure_toolset_script="$eng_root/configure-toolset.sh"
-if [[ -a "$configure_toolset_script" ]]; then
- . "$configure_toolset_script"
-fi
-
-# TODO: https://github.com/dotnet/arcade/issues/1468
-# Temporary workaround to avoid breaking change.
-# Remove once repos are updated.
-if [[ -n "${useInstalledDotNetCli:-}" ]]; then
- use_installed_dotnet_cli="$useInstalledDotNetCli"
+if [[ "$clean" == true ]]; then
+ if [ -d "$artifacts_dir" ]; then
+ rm -rf $artifacts_dir
+ echo "Artifacts directory deleted."
+ fi
+ exit 0
fi
-if [[ "$restore" == true && -z ${DisableNativeToolsetInstalls:-} ]]; then
+if [[ "$restore" == true ]]; then
InitializeNativeTools
fi
diff --git a/eng/common/cross/android/arm/toolchain.cmake b/eng/common/cross/android/arm/toolchain.cmake
deleted file mode 100644
index a7e1c73501b..00000000000
--- a/eng/common/cross/android/arm/toolchain.cmake
+++ /dev/null
@@ -1,41 +0,0 @@
-set(CROSS_NDK_TOOLCHAIN $ENV{ROOTFS_DIR}/../)
-set(CROSS_ROOTFS ${CROSS_NDK_TOOLCHAIN}/sysroot)
-set(CLR_CMAKE_PLATFORM_ANDROID "Android")
-
-set(CMAKE_SYSTEM_NAME Linux)
-set(CMAKE_SYSTEM_VERSION 1)
-set(CMAKE_SYSTEM_PROCESSOR arm)
-
-## Specify the toolchain
-set(TOOLCHAIN "arm-linux-androideabi")
-set(CMAKE_PREFIX_PATH ${CROSS_NDK_TOOLCHAIN})
-set(TOOLCHAIN_PREFIX ${TOOLCHAIN}-)
-
-find_program(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}clang)
-find_program(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}clang++)
-find_program(CMAKE_ASM_COMPILER ${TOOLCHAIN_PREFIX}clang)
-find_program(CMAKE_AR ${TOOLCHAIN_PREFIX}ar)
-find_program(CMAKE_LD ${TOOLCHAIN_PREFIX}ar)
-find_program(CMAKE_OBJCOPY ${TOOLCHAIN_PREFIX}objcopy)
-find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump)
-
-add_compile_options(--sysroot=${CROSS_ROOTFS})
-add_compile_options(-fPIE)
-add_compile_options(-mfloat-abi=soft)
-include_directories(SYSTEM ${CROSS_NDK_TOOLCHAIN}/include/c++/4.9.x/)
-include_directories(SYSTEM ${CROSS_NDK_TOOLCHAIN}/include/c++/4.9.x/arm-linux-androideabi/)
-
-set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B ${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}")
-set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
-set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}")
-set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -fPIE -pie")
-
-set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
-set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
-set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
-
-set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
-set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
-set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
-set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
-set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
diff --git a/eng/common/cross/android/arm64/toolchain.cmake b/eng/common/cross/android/arm64/toolchain.cmake
deleted file mode 100644
index 29415899c1c..00000000000
--- a/eng/common/cross/android/arm64/toolchain.cmake
+++ /dev/null
@@ -1,42 +0,0 @@
-set(CROSS_NDK_TOOLCHAIN $ENV{ROOTFS_DIR}/../)
-set(CROSS_ROOTFS ${CROSS_NDK_TOOLCHAIN}/sysroot)
-set(CLR_CMAKE_PLATFORM_ANDROID "Android")
-
-set(CMAKE_SYSTEM_NAME Linux)
-set(CMAKE_SYSTEM_VERSION 1)
-set(CMAKE_SYSTEM_PROCESSOR aarch64)
-
-## Specify the toolchain
-set(TOOLCHAIN "aarch64-linux-android")
-set(CMAKE_PREFIX_PATH ${CROSS_NDK_TOOLCHAIN})
-set(TOOLCHAIN_PREFIX ${TOOLCHAIN}-)
-
-find_program(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}clang)
-find_program(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}clang++)
-find_program(CMAKE_ASM_COMPILER ${TOOLCHAIN_PREFIX}clang)
-find_program(CMAKE_AR ${TOOLCHAIN_PREFIX}ar)
-find_program(CMAKE_LD ${TOOLCHAIN_PREFIX}ar)
-find_program(CMAKE_OBJCOPY ${TOOLCHAIN_PREFIX}objcopy)
-find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump)
-
-add_compile_options(--sysroot=${CROSS_ROOTFS})
-add_compile_options(-fPIE)
-
-## Needed for Android or bionic specific conditionals
-add_compile_options(-D__ANDROID__)
-add_compile_options(-D__BIONIC__)
-
-set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B ${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}")
-set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
-set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}")
-set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -fPIE -pie")
-
-set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
-set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
-set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
-
-set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
-set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
-set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
-set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
-set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
diff --git a/eng/common/cross/arm64/tizen-build-rootfs.sh b/eng/common/cross/arm64/tizen-build-rootfs.sh
new file mode 100644
index 00000000000..13bfddb5e2a
--- /dev/null
+++ b/eng/common/cross/arm64/tizen-build-rootfs.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+set -e
+
+__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+__TIZEN_CROSSDIR="$__CrossDir/tizen"
+
+if [[ -z "$ROOTFS_DIR" ]]; then
+ echo "ROOTFS_DIR is not defined."
+ exit 1;
+fi
+
+TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
+mkdir -p $TIZEN_TMP_DIR
+
+# Download files
+echo ">>Start downloading files"
+VERBOSE=1 $__CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
+echo "<>Start constructing Tizen rootfs"
+TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
+cd $ROOTFS_DIR
+for f in $TIZEN_RPM_FILES; do
+ rpm2cpio $f | cpio -idm --quiet
+done
+echo "<>Start configuring Tizen rootfs"
+ln -sfn asm-arm64 ./usr/include/asm
+patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
+echo "</dev/null; then
+ VERBOSE=0
+fi
+
+Log()
+{
+ if [ $VERBOSE -ge $1 ]; then
+ echo ${@:2}
+ fi
+}
+
+Inform()
+{
+ Log 1 -e "\x1B[0;34m$@\x1B[m"
+}
+
+Debug()
+{
+ Log 2 -e "\x1B[0;32m$@\x1B[m"
+}
+
+Error()
+{
+ >&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
+}
+
+Fetch()
+{
+ URL=$1
+ FILE=$2
+ PROGRESS=$3
+ if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
+ CURL_OPT="--progress-bar"
+ else
+ CURL_OPT="--silent"
+ fi
+ curl $CURL_OPT $URL > $FILE
+}
+
+hash curl 2> /dev/null || { Error "Require 'curl'. Aborting."; exit 1; }
+hash xmllint 2> /dev/null || { Error "Require 'xmllint'. Aborting."; exit 1; }
+hash sha256sum 2> /dev/null || { Error "Require 'sha256sum'. Aborting."; exit 1; }
+
+TMPDIR=$1
+if [ ! -d $TMPDIR ]; then
+ TMPDIR=./tizen_tmp
+ Debug "Create temporary directory : $TMPDIR"
+ mkdir -p $TMPDIR
+fi
+
+TIZEN_URL=http://download.tizen.org/snapshots/tizen/
+BUILD_XML=build.xml
+REPOMD_XML=repomd.xml
+PRIMARY_XML=primary.xml
+TARGET_URL="http://__not_initialized"
+
+Xpath_get()
+{
+ XPATH_RESULT=''
+ XPATH=$1
+ XML_FILE=$2
+ RESULT=$(xmllint --xpath $XPATH $XML_FILE)
+ if [[ -z ${RESULT// } ]]; then
+ Error "Can not find target from $XML_FILE"
+ Debug "Xpath = $XPATH"
+ exit 1
+ fi
+ XPATH_RESULT=$RESULT
+}
+
+fetch_tizen_pkgs_init()
+{
+ TARGET=$1
+ PROFILE=$2
+ Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
+
+ TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
+ if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
+ mkdir -p $TMP_PKG_DIR
+
+ PKG_URL=$TIZEN_URL/$PROFILE/latest
+
+ BUILD_XML_URL=$PKG_URL/$BUILD_XML
+ TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
+ TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
+ TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
+ TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
+
+ Fetch $BUILD_XML_URL $TMP_BUILD
+
+ Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
+
+ TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
+ Xpath_get $TARGET_XPATH $TMP_BUILD
+ TARGET_PATH=$XPATH_RESULT
+ TARGET_URL=$PKG_URL/$TARGET_PATH
+
+ REPOMD_URL=$TARGET_URL/repodata/repomd.xml
+ PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
+
+ Fetch $REPOMD_URL $TMP_REPOMD
+
+ Debug "fetch $REPOMD_URL to $TMP_REPOMD"
+
+ Xpath_get $PRIMARY_XPATH $TMP_REPOMD
+ PRIMARY_XML_PATH=$XPATH_RESULT
+ PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
+
+ Fetch $PRIMARY_URL $TMP_PRIMARYGZ
+
+ Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
+
+ gunzip $TMP_PRIMARYGZ
+
+ Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
+}
+
+fetch_tizen_pkgs()
+{
+ ARCH=$1
+ PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
+
+ PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
+
+ for pkg in ${@:2}
+ do
+ Inform "Fetching... $pkg"
+ XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
+ XPATH=${XPATH/_ARCH_/$ARCH}
+ Xpath_get $XPATH $TMP_PRIMARY
+ PKG_PATH=$XPATH_RESULT
+
+ XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
+ XPATH=${XPATH/_ARCH_/$ARCH}
+ Xpath_get $XPATH $TMP_PRIMARY
+ CHECKSUM=$XPATH_RESULT
+
+ PKG_URL=$TARGET_URL/$PKG_PATH
+ PKG_FILE=$(basename $PKG_PATH)
+ PKG_PATH=$TMPDIR/$PKG_FILE
+
+ Debug "Download $PKG_URL to $PKG_PATH"
+ Fetch $PKG_URL $PKG_PATH true
+
+ echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null
+ if [ $? -ne 0 ]; then
+ Error "Fail to fetch $PKG_URL to $PKG_PATH"
+ Debug "Checksum = $CHECKSUM"
+ exit 1
+ fi
+ done
+}
+
+Inform "Initialize arm base"
+fetch_tizen_pkgs_init standard base
+Inform "fetch common packages"
+fetch_tizen_pkgs aarch64 gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel
+Inform "fetch coreclr packages"
+fetch_tizen_pkgs aarch64 lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
+Inform "fetch corefx packages"
+fetch_tizen_pkgs aarch64 libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel
+
+Inform "Initialize standard unified"
+fetch_tizen_pkgs_init standard unified
+Inform "fetch corefx packages"
+fetch_tizen_pkgs aarch64 gssdp gssdp-devel tizen-release
+
diff --git a/eng/common/cross/arm64/tizen/tizen.patch b/eng/common/cross/arm64/tizen/tizen.patch
new file mode 100644
index 00000000000..af7c8be0590
--- /dev/null
+++ b/eng/common/cross/arm64/tizen/tizen.patch
@@ -0,0 +1,9 @@
+diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
+--- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900
++++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900
+@@ -2,4 +2,4 @@
+ Use the shared library, but some functions are only in
+ the static library, so try that secondarily. */
+ OUTPUT_FORMAT(elf64-littleaarch64)
+-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-aarch64.so.1 ) )
++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-aarch64.so.1 ) )
diff --git a/eng/common/cross/armel/tizen-build-rootfs.sh b/eng/common/cross/armel/tizen-build-rootfs.sh
index 87c48e78fbb..9a4438af61c 100755
--- a/eng/common/cross/armel/tizen-build-rootfs.sh
+++ b/eng/common/cross/armel/tizen-build-rootfs.sh
@@ -9,13 +9,6 @@ if [[ -z "$ROOTFS_DIR" ]]; then
exit 1;
fi
-# Clean-up (TODO-Cleanup: We may already delete $ROOTFS_DIR at ./cross/build-rootfs.sh.)
-# hk0110
-if [ -d "$ROOTFS_DIR" ]; then
- umount $ROOTFS_DIR/*
- rm -rf $ROOTFS_DIR
-fi
-
TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
mkdir -p $TIZEN_TMP_DIR
@@ -37,8 +30,6 @@ rm -rf $TIZEN_TMP_DIR
# Configure Tizen rootfs
echo ">>Start configuring Tizen rootfs"
-rm ./usr/lib/libunwind.so
-ln -s libunwind.so.8 ./usr/lib/libunwind.so
ln -sfn asm-arm ./usr/include/asm
patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
echo "< $__ToolchainDir/sysroot/android_platform
-echo Now run:
-echo CONFIG_DIR=\`realpath cross/android/$__BuildArch\` ROOTFS_DIR=\`realpath $__ToolchainDir/sysroot\` ./build.sh cross $__BuildArch skipgenerateversion skipnuget cmakeargs -DENABLE_LLDBPLUGIN=0
+for path in $(wget -qO- http://termux.net/dists/stable/main/binary-$__AndroidArch/Packages |\
+ grep -A15 "Package: \(${__AndroidPackages// /\\|}\)" | grep -v "static\|tool" | grep Filename); do
+ if [[ "$path" != "Filename:" ]]; then
+ echo "Working on: $path"
+ wget -qO- http://termux.net/$path | dpkg -x - "$__TmpDir"
+ fi
+done
+
+cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/sysroot/usr/"
+
+# Generate platform file for build.sh script to assign to __DistroRid
+echo "Generating platform file..."
+echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/sysroot/android_platform
+
+echo "Now to build coreclr, libraries and installers; run:"
+echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
+ --subsetCategory coreclr
+echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
+ --subsetCategory libraries
+echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
+ --subsetCategory installer
diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh
index d7d5d7d5f44..ffdff38542e 100755
--- a/eng/common/cross/build-rootfs.sh
+++ b/eng/common/cross/build-rootfs.sh
@@ -1,19 +1,26 @@
#!/usr/bin/env bash
+set -e
+
usage()
{
- echo "Usage: $0 [BuildArch] [LinuxCodeName] [lldbx.y] [--skipunmount] --rootfsdir ]"
+ echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [--skipunmount] --rootfsdir ]"
echo "BuildArch can be: arm(default), armel, arm64, x86"
- echo "LinuxCodeName - optional, Code name for Linux, can be: trusty, xenial(default), zesty, bionic, alpine. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen."
- echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine"
+ echo "CodeName - optional, Code name for Linux, can be: trusty, xenial(default), zesty, bionic, alpine. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen."
+ echo " for FreeBSD can be: freebsd11 or freebsd12."
+ echo " for illumos can be: illumos."
+ echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FReeBSD"
echo "--skipunmount - optional, will skip the unmount of rootfs folder."
+ echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
exit 1
}
-__LinuxCodeName=xenial
+__CodeName=xenial
__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
__InitialDir=$PWD
__BuildArch=arm
+__AlpineArch=armv7
+__QEMUArch=arm
__UbuntuArch=armhf
__UbuntuRepo="http://ports.ubuntu.com/"
__LLDB_Package="liblldb-3.9-dev"
@@ -25,8 +32,10 @@ __UbuntuPackages="build-essential"
__AlpinePackages="alpine-base"
__AlpinePackages+=" build-base"
__AlpinePackages+=" linux-headers"
-__AlpinePackages+=" lldb-dev"
-__AlpinePackages+=" llvm-dev"
+__AlpinePackagesEdgeCommunity=" lldb-dev"
+__AlpinePackagesEdgeMain=" llvm10-libs"
+__AlpinePackagesEdgeMain+=" python3"
+__AlpinePackagesEdgeMain+=" libedit"
# symlinks fixer
__UbuntuPackages+=" symlinks"
@@ -52,6 +61,21 @@ __AlpinePackages+=" krb5-dev"
__AlpinePackages+=" openssl-dev"
__AlpinePackages+=" zlib-dev"
+__FreeBSDBase="12.1-RELEASE"
+__FreeBSDPkg="1.12.0"
+__FreeBSDPackages="libunwind"
+__FreeBSDPackages+=" icu"
+__FreeBSDPackages+=" libinotify"
+__FreeBSDPackages+=" lttng-ust"
+__FreeBSDPackages+=" krb5"
+
+__IllumosPackages="icu-64.2nb2"
+__IllumosPackages+=" mit-krb5-1.16.2nb4"
+__IllumosPackages+=" openssl-1.1.1e"
+__IllumosPackages+=" zlib-1.2.11"
+
+__UseMirror=0
+
__UnprocessedBuildArgs=
while :; do
if [ $# -le 0 ]; then
@@ -67,7 +91,7 @@ while :; do
arm)
__BuildArch=arm
__UbuntuArch=armhf
- __AlpineArch=armhf
+ __AlpineArch=armv7
__QEMUArch=arm
;;
arm64)
@@ -80,7 +104,7 @@ while :; do
__BuildArch=armel
__UbuntuArch=armel
__UbuntuRepo="http://ftp.debian.org/debian/"
- __LinuxCodeName=jessie
+ __CodeName=jessie
;;
x86)
__BuildArch=x86
@@ -109,53 +133,66 @@ while :; do
unset __LLDB_Package
;;
trusty) # Ubuntu 14.04
- if [ "$__LinuxCodeName" != "jessie" ]; then
- __LinuxCodeName=trusty
+ if [ "$__CodeName" != "jessie" ]; then
+ __CodeName=trusty
fi
;;
xenial) # Ubuntu 16.04
- if [ "$__LinuxCodeName" != "jessie" ]; then
- __LinuxCodeName=xenial
+ if [ "$__CodeName" != "jessie" ]; then
+ __CodeName=xenial
fi
;;
zesty) # Ubuntu 17.04
- if [ "$__LinuxCodeName" != "jessie" ]; then
- __LinuxCodeName=zesty
+ if [ "$__CodeName" != "jessie" ]; then
+ __CodeName=zesty
fi
;;
bionic) # Ubuntu 18.04
- if [ "$__LinuxCodeName" != "jessie" ]; then
- __LinuxCodeName=bionic
+ if [ "$__CodeName" != "jessie" ]; then
+ __CodeName=bionic
fi
;;
jessie) # Debian 8
- __LinuxCodeName=jessie
+ __CodeName=jessie
__UbuntuRepo="http://ftp.debian.org/debian/"
;;
stretch) # Debian 9
- __LinuxCodeName=stretch
+ __CodeName=stretch
__UbuntuRepo="http://ftp.debian.org/debian/"
__LLDB_Package="liblldb-6.0-dev"
;;
buster) # Debian 10
- __LinuxCodeName=buster
+ __CodeName=buster
__UbuntuRepo="http://ftp.debian.org/debian/"
__LLDB_Package="liblldb-6.0-dev"
;;
tizen)
- if [ "$__BuildArch" != "armel" ]; then
- echo "Tizen is available only for armel."
+ if [ "$__BuildArch" != "armel" ] && [ "$__BuildArch" != "arm64" ]; then
+ echo "Tizen is available only for armel and arm64."
usage;
exit 1;
fi
- __LinuxCodeName=
+ __CodeName=
__UbuntuRepo=
__Tizen=tizen
;;
alpine)
- __LinuxCodeName=alpine
+ __CodeName=alpine
__UbuntuRepo=
;;
+ freebsd11)
+ __FreeBSDBase="11.3-RELEASE"
+ ;&
+ freebsd12)
+ __CodeName=freebsd
+ __BuildArch=x64
+ __SkipUnmount=1
+ ;;
+ illumos)
+ __CodeName=illumos
+ __BuildArch=x64
+ __SkipUnmount=1
+ ;;
--skipunmount)
__SkipUnmount=1
;;
@@ -163,6 +200,9 @@ while :; do
shift
__RootfsDir=$1
;;
+ --use-mirror)
+ __UseMirror=1
+ ;;
*)
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
;;
@@ -186,46 +226,121 @@ fi
if [ -d "$__RootfsDir" ]; then
if [ $__SkipUnmount == 0 ]; then
- umount $__RootfsDir/*
+ umount $__RootfsDir/* || true
fi
rm -rf $__RootfsDir
fi
-if [[ "$__LinuxCodeName" == "alpine" ]]; then
+mkdir -p $__RootfsDir
+__RootfsDir="$( cd "$__RootfsDir" && pwd )"
+
+if [[ "$__CodeName" == "alpine" ]]; then
__ApkToolsVersion=2.9.1
- __AlpineVersion=3.7
+ __AlpineVersion=3.9
__ApkToolsDir=$(mktemp -d)
wget https://github.com/alpinelinux/apk-tools/releases/download/v$__ApkToolsVersion/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -P $__ApkToolsDir
tar -xf $__ApkToolsDir/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -C $__ApkToolsDir
mkdir -p $__RootfsDir/usr/bin
cp -v /usr/bin/qemu-$__QEMUArch-static $__RootfsDir/usr/bin
+
$__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
-X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/main \
-X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/community \
- -X http://dl-cdn.alpinelinux.org/alpine/edge/testing \
- -X http://dl-cdn.alpinelinux.org/alpine/edge/main \
-U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
add $__AlpinePackages
+
+ $__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
+ -X http://dl-cdn.alpinelinux.org/alpine/edge/main \
+ -U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
+ add $__AlpinePackagesEdgeMain
+
+ $__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
+ -X http://dl-cdn.alpinelinux.org/alpine/edge/community \
+ -U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
+ add $__AlpinePackagesEdgeCommunity
+
rm -r $__ApkToolsDir
-elif [[ -n $__LinuxCodeName ]]; then
- qemu-debootstrap --arch $__UbuntuArch $__LinuxCodeName $__RootfsDir $__UbuntuRepo
- cp $__CrossDir/$__BuildArch/sources.list.$__LinuxCodeName $__RootfsDir/etc/apt/sources.list
+elif [[ "$__CodeName" == "freebsd" ]]; then
+ mkdir -p $__RootfsDir/usr/local/etc
+ wget -O - https://download.freebsd.org/ftp/releases/amd64/${__FreeBSDBase}/base.txz | tar -C $__RootfsDir -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
+ # For now, ask for 11 ABI even on 12. This can be revisited later.
+ echo "ABI = \"FreeBSD:11:amd64\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > ${__RootfsDir}/usr/local/etc/pkg.conf
+ echo "FreeBSD: { url: "pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > ${__RootfsDir}/etc/pkg/FreeBSD.conf
+ mkdir -p $__RootfsDir/tmp
+ # get and build package manager
+ wget -O - https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz | tar -C $__RootfsDir/tmp -zxf -
+ cd $__RootfsDir/tmp/pkg-${__FreeBSDPkg}
+ # needed for install to succeed
+ mkdir -p $__RootfsDir/host/etc
+ ./autogen.sh && ./configure --prefix=$__RootfsDir/host && make && make install
+ rm -rf $__RootfsDir/tmp/pkg-${__FreeBSDPkg}
+ # install packages we need.
+ INSTALL_AS_USER=$(whoami) $__RootfsDir/host/sbin/pkg -r $__RootfsDir -C $__RootfsDir/usr/local/etc/pkg.conf update
+ INSTALL_AS_USER=$(whoami) $__RootfsDir/host/sbin/pkg -r $__RootfsDir -C $__RootfsDir/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
+elif [[ "$__CodeName" == "illumos" ]]; then
+ mkdir "$__RootfsDir/tmp"
+ pushd "$__RootfsDir/tmp"
+ JOBS="$(getconf _NPROCESSORS_ONLN)"
+ echo "Downloading sysroot."
+ wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
+ echo "Building binutils. Please wait.."
+ wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
+ mkdir build-binutils && cd build-binutils
+ ../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="x86_64-sun-solaris2.10" --program-prefix="x86_64-illumos-" --with-sysroot="$__RootfsDir"
+ make -j "$JOBS" && make install && cd ..
+ echo "Building gcc. Please wait.."
+ wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
+ CFLAGS="-fPIC"
+ CXXFLAGS="-fPIC"
+ CXXFLAGS_FOR_TARGET="-fPIC"
+ CFLAGS_FOR_TARGET="-fPIC"
+ export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET
+ mkdir build-gcc && cd build-gcc
+ ../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="x86_64-sun-solaris2.10" --program-prefix="x86_64-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \
+ --with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \
+ --disable-libquadmath-support --disable-shared --enable-tls
+ make -j "$JOBS" && make install && cd ..
+ BaseUrl=https://pkgsrc.joyent.com
+ if [[ "$__UseMirror" == 1 ]]; then
+ BaseUrl=http://pkgsrc.smartos.skylime.net
+ fi
+ BaseUrl="$BaseUrl"/packages/SmartOS/2020Q1/x86_64/All
+ echo "Downloading dependencies."
+ read -ra array <<<"$__IllumosPackages"
+ for package in "${array[@]}"; do
+ echo "Installing $package..."
+ wget "$BaseUrl"/"$package".tgz
+ ar -x "$package".tgz
+ tar --skip-old-files -xzf "$package".tmp.tgz -C "$__RootfsDir" 2>/dev/null
+ done
+ echo "Cleaning up temporary files."
+ popd
+ rm -rf "$__RootfsDir"/{tmp,+*}
+ mkdir -p "$__RootfsDir"/usr/include/net
+ mkdir -p "$__RootfsDir"/usr/include/netpacket
+ wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
+ wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
+ wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
+ wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
+elif [[ -n $__CodeName ]]; then
+ qemu-debootstrap --arch $__UbuntuArch $__CodeName $__RootfsDir $__UbuntuRepo
+ cp $__CrossDir/$__BuildArch/sources.list.$__CodeName $__RootfsDir/etc/apt/sources.list
chroot $__RootfsDir apt-get update
chroot $__RootfsDir apt-get -f -y install
chroot $__RootfsDir apt-get -y install $__UbuntuPackages
chroot $__RootfsDir symlinks -cr /usr
if [ $__SkipUnmount == 0 ]; then
- umount $__RootfsDir/*
+ umount $__RootfsDir/* || true
fi
- if [[ "$__BuildArch" == "arm" && "$__LinuxCodeName" == "trusty" ]]; then
+ if [[ "$__BuildArch" == "arm" && "$__CodeName" == "trusty" ]]; then
pushd $__RootfsDir
patch -p1 < $__CrossDir/$__BuildArch/trusty.patch
patch -p1 < $__CrossDir/$__BuildArch/trusty-lttng-2.4.patch
popd
fi
-elif [ "$__Tizen" == "tizen" ]; then
+elif [[ "$__Tizen" == "tizen" ]]; then
ROOTFS_DIR=$__RootfsDir $__CrossDir/$__BuildArch/tizen-build-rootfs.sh
else
echo "Unsupported target platform."
diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake
index 071d4112419..137736c0a27 100644
--- a/eng/common/cross/toolchain.cmake
+++ b/eng/common/cross/toolchain.cmake
@@ -1,18 +1,27 @@
set(CROSS_ROOTFS $ENV{ROOTFS_DIR})
set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH})
-set(CMAKE_SYSTEM_NAME Linux)
+if(EXISTS ${CROSS_ROOTFS}/bin/freebsd-version)
+ set(CMAKE_SYSTEM_NAME FreeBSD)
+elseif(EXISTS ${CROSS_ROOTFS}/usr/platform/i86pc)
+ set(CMAKE_SYSTEM_NAME SunOS)
+ set(ILLUMOS 1)
+else()
+ set(CMAKE_SYSTEM_NAME Linux)
+endif()
set(CMAKE_SYSTEM_VERSION 1)
if(TARGET_ARCH_NAME STREQUAL "armel")
set(CMAKE_SYSTEM_PROCESSOR armv7l)
set(TOOLCHAIN "arm-linux-gnueabi")
if("$ENV{__DistroRid}" MATCHES "tizen.*")
- set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/6.2.1")
+ set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/9.2.0")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "arm")
set(CMAKE_SYSTEM_PROCESSOR armv7l)
- if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf)
+ if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv7-alpine-linux-musleabihf)
+ set(TOOLCHAIN "armv7-alpine-linux-musleabihf")
+ elseif(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf)
set(TOOLCHAIN "armv6-alpine-linux-musleabihf")
else()
set(TOOLCHAIN "arm-linux-gnueabihf")
@@ -24,65 +33,141 @@ elseif(TARGET_ARCH_NAME STREQUAL "arm64")
else()
set(TOOLCHAIN "aarch64-linux-gnu")
endif()
+ if("$ENV{__DistroRid}" MATCHES "tizen.*")
+ set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/9.2.0")
+ endif()
elseif(TARGET_ARCH_NAME STREQUAL "x86")
set(CMAKE_SYSTEM_PROCESSOR i686)
set(TOOLCHAIN "i686-linux-gnu")
+elseif (CMAKE_SYSTEM_NAME STREQUAL "FreeBSD")
+ set(CMAKE_SYSTEM_PROCESSOR "x86_64")
+ set(triple "x86_64-unknown-freebsd11")
+elseif (ILLUMOS)
+ set(CMAKE_SYSTEM_PROCESSOR "x86_64")
+ set(TOOLCHAIN "x86_64-illumos")
else()
message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only armel, arm, arm64 and x86 are supported!")
endif()
+if(DEFINED ENV{TOOLCHAIN})
+ set(TOOLCHAIN $ENV{TOOLCHAIN})
+endif()
+
# Specify include paths
-if(TARGET_ARCH_NAME STREQUAL "armel")
- if(DEFINED TIZEN_TOOLCHAIN)
+if(DEFINED TIZEN_TOOLCHAIN)
+ if(TARGET_ARCH_NAME STREQUAL "armel")
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7l-tizen-linux-gnueabi)
endif()
+ if(TARGET_ARCH_NAME STREQUAL "arm64")
+ include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
+ include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/aarch64-tizen-linux-gnu)
+ endif()
endif()
-# add_compile_param - adds only new options without duplicates.
-# arg0 - list with result options, arg1 - list with new options.
-# arg2 - optional argument, quick summary string for optional using CACHE FORCE mode.
-macro(add_compile_param)
- if(NOT ${ARGC} MATCHES "^(2|3)$")
- message(FATAL_ERROR "Wrong using add_compile_param! Two or three parameters must be given! See add_compile_param description.")
- endif()
- foreach(OPTION ${ARGV1})
- if(NOT ${ARGV0} MATCHES "${OPTION}($| )")
- set(${ARGV0} "${${ARGV0}} ${OPTION}")
- if(${ARGC} EQUAL "3") # CACHE FORCE mode
- set(${ARGV0} "${${ARGV0}}" CACHE STRING "${ARGV2}" FORCE)
- endif()
+if("$ENV{__DistroRid}" MATCHES "android.*")
+ if(TARGET_ARCH_NAME STREQUAL "arm")
+ set(ANDROID_ABI armeabi-v7a)
+ elseif(TARGET_ARCH_NAME STREQUAL "arm64")
+ set(ANDROID_ABI arm64-v8a)
endif()
- endforeach()
-endmacro()
+
+ # extract platform number required by the NDK's toolchain
+ string(REGEX REPLACE ".*\\.([0-9]+)-.*" "\\1" ANDROID_PLATFORM "$ENV{__DistroRid}")
+
+ set(ANDROID_TOOLCHAIN clang)
+ set(FEATURE_EVENT_TRACE 0) # disable event trace as there is no lttng-ust package in termux repository
+ set(CMAKE_SYSTEM_LIBRARY_PATH "${CROSS_ROOTFS}/usr/lib")
+ set(CMAKE_SYSTEM_INCLUDE_PATH "${CROSS_ROOTFS}/usr/include")
+
+ # include official NDK toolchain script
+ include(${CROSS_ROOTFS}/../build/cmake/android.toolchain.cmake)
+elseif(CMAKE_SYSTEM_NAME STREQUAL "FreeBSD")
+ # we cross-compile by instructing clang
+ set(CMAKE_C_COMPILER_TARGET ${triple})
+ set(CMAKE_CXX_COMPILER_TARGET ${triple})
+ set(CMAKE_ASM_COMPILER_TARGET ${triple})
+ set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
+elseif(ILLUMOS)
+ set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
+
+ include_directories(SYSTEM ${CROSS_ROOTFS}/include)
+
+ set(TOOLSET_PREFIX ${TOOLCHAIN}-)
+ function(locate_toolchain_exec exec var)
+ string(TOUPPER ${exec} EXEC_UPPERCASE)
+ if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "")
+ set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE)
+ return()
+ endif()
+
+ find_program(EXEC_LOCATION_${exec}
+ NAMES
+ "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}"
+ "${TOOLSET_PREFIX}${exec}")
+
+ if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND")
+ message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.")
+ endif()
+ set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE)
+ endfunction()
+
+ set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}")
+
+ locate_toolchain_exec(gcc CMAKE_C_COMPILER)
+ locate_toolchain_exec(g++ CMAKE_CXX_COMPILER)
+
+ set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
+ set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
+else()
+ set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
+
+ set(CMAKE_C_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr")
+ set(CMAKE_CXX_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr")
+ set(CMAKE_ASM_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr")
+endif()
# Specify link flags
-add_compile_param(CROSS_LINK_FLAGS "--sysroot=${CROSS_ROOTFS}")
-add_compile_param(CROSS_LINK_FLAGS "--gcc-toolchain=${CROSS_ROOTFS}/usr")
-add_compile_param(CROSS_LINK_FLAGS "--target=${TOOLCHAIN}")
-add_compile_param(CROSS_LINK_FLAGS "-fuse-ld=gold")
+
+function(add_toolchain_linker_flag Flag)
+ set(Config "${ARGV1}")
+ set(CONFIG_SUFFIX "")
+ if (NOT Config STREQUAL "")
+ set(CONFIG_SUFFIX "_${Config}")
+ endif()
+ set("CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}" "${CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}} ${Flag}" PARENT_SCOPE)
+ set("CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}" "${CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}} ${Flag}" PARENT_SCOPE)
+endfunction()
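+# Hypothetical usage sketch: add_toolchain_linker_flag("-llog" DEBUG) would append -llog to
+# CMAKE_EXE_LINKER_FLAGS_DEBUG and CMAKE_SHARED_LINKER_FLAGS_DEBUG only; omitting the optional
+# config argument appends the flag to the unsuffixed linker flag variables instead.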
+
if(TARGET_ARCH_NAME STREQUAL "armel")
if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only
- add_compile_param(CROSS_LINK_FLAGS "-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
- add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/lib")
- add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/usr/lib")
- add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
+ add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "arm64")
+ if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only
+ add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib64")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
+
+ add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib64")
+ add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64")
+ add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "x86")
- add_compile_param(CROSS_LINK_FLAGS "-m32")
+ add_toolchain_linker_flag(-m32)
+elseif(ILLUMOS)
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib/amd64")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/amd64/lib")
endif()
-add_compile_param(CMAKE_EXE_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS")
-add_compile_param(CMAKE_SHARED_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS")
-add_compile_param(CMAKE_MODULE_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS")
-
# Specify compile options
-add_compile_options("--sysroot=${CROSS_ROOTFS}")
-add_compile_options("--target=${TOOLCHAIN}")
-add_compile_options("--gcc-toolchain=${CROSS_ROOTFS}/usr")
-if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64)$")
+if((TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64)$" AND NOT "$ENV{__DistroRid}" MATCHES "android.*") OR ILLUMOS)
set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN})
set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN})
set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN})
@@ -90,20 +175,33 @@ endif()
if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$")
add_compile_options(-mthumb)
- add_compile_options(-mfpu=vfpv3)
+ if (NOT DEFINED CLR_ARM_FPU_TYPE)
+ set (CLR_ARM_FPU_TYPE vfpv3)
+ endif (NOT DEFINED CLR_ARM_FPU_TYPE)
+
+ add_compile_options (-mfpu=${CLR_ARM_FPU_TYPE})
+ if (NOT DEFINED CLR_ARM_FPU_CAPABILITY)
+ set (CLR_ARM_FPU_CAPABILITY 0x7)
+ endif (NOT DEFINED CLR_ARM_FPU_CAPABILITY)
+
+ add_definitions (-DCLR_ARM_FPU_CAPABILITY=${CLR_ARM_FPU_CAPABILITY})
+
if(TARGET_ARCH_NAME STREQUAL "armel")
add_compile_options(-mfloat-abi=softfp)
- if(DEFINED TIZEN_TOOLCHAIN)
- add_compile_options(-Wno-deprecated-declarations) # compile-time option
- add_compile_options(-D__extern_always_inline=inline) # compile-time option
- endif()
endif()
elseif(TARGET_ARCH_NAME STREQUAL "x86")
add_compile_options(-m32)
add_compile_options(-Wno-error=unused-command-line-argument)
endif()
-# Set LLDB include and library paths
+if(DEFINED TIZEN_TOOLCHAIN)
+ if(TARGET_ARCH_NAME MATCHES "^(armel|arm64)$")
+ add_compile_options(-Wno-deprecated-declarations) # compile-time option
+ add_compile_options(-D__extern_always_inline=inline) # compile-time option
+ endif()
+endif()
+
+# Set LLDB include and library paths for builds that need lldb.
if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$")
if(TARGET_ARCH_NAME STREQUAL "x86")
set(LLVM_CROSS_DIR "$ENV{LLVM_CROSS_HOME}")
@@ -131,7 +229,6 @@ if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$")
endif()
endif()
-set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh
index 06b65342528..d981d7bbf38 100755
--- a/eng/common/darc-init.sh
+++ b/eng/common/darc-init.sh
@@ -68,7 +68,7 @@ function InstallDarcCli {
fi
fi
- local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json"
+ local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json"
echo "Installing Darc CLI version $darcVersion..."
echo "You may need to restart your command shell if this is the first dotnet tool you have installed."
diff --git a/eng/common/dotnet-install-scripts/dotnet-install.ps1 b/eng/common/dotnet-install-scripts/dotnet-install.ps1
new file mode 100644
index 00000000000..f63b533f25a
--- /dev/null
+++ b/eng/common/dotnet-install-scripts/dotnet-install.ps1
@@ -0,0 +1,774 @@
+#
+# Copyright (c) .NET Foundation and contributors. All rights reserved.
+# Licensed under the MIT license. See LICENSE file in the project root for full license information.
+#
+
+# Copied from https://dot.net/v1/dotnet-install.ps1 on 8/26/2020
+
+<#
+.SYNOPSIS
+ Installs dotnet cli
+.DESCRIPTION
+ Installs dotnet cli. If dotnet installation already exists in the given directory
+ it will update it only if the requested version differs from the one already installed.
+.PARAMETER Channel
+ Default: LTS
+ Download from the Channel specified. Possible values:
+ - Current - most current release
+ - LTS - most current supported release
+ - 2-part version in a format A.B - represents a specific release
+ examples: 2.0, 1.0
+ - Branch name
+ examples: release/2.0.0, Master
+ Note: The version parameter overrides the channel parameter.
+.PARAMETER Version
+ Default: latest
+ Represents a build version on a specific channel. Possible values:
+ - latest - the latest build on the specific channel
+ - coherent - the latest coherent build on the specific channel
+ coherent applies only to SDK downloads
+ - 3-part version in a format A.B.C - represents specific version of build
+ examples: 2.0.0-preview2-006120, 1.1.0
+.PARAMETER InstallDir
+ Default: %LocalAppData%\Microsoft\dotnet
+ Path to where to install dotnet. Note that binaries will be placed directly in a given directory.
+.PARAMETER Architecture
+ Default: <auto> - this value represents the currently running OS architecture
+ Architecture of dotnet binaries to be installed.
+ Possible values are: <auto>, amd64, x64, x86, arm64, arm
+.PARAMETER SharedRuntime
+ This parameter is obsolete and may be removed in a future version of this script.
+ The recommended alternative is '-Runtime dotnet'.
+ Installs just the shared runtime bits, not the entire SDK.
+.PARAMETER Runtime
+ Installs just a shared runtime, not the entire SDK.
+ Possible values:
+ - dotnet - the Microsoft.NETCore.App shared runtime
+ - aspnetcore - the Microsoft.AspNetCore.App shared runtime
+ - windowsdesktop - the Microsoft.WindowsDesktop.App shared runtime
+.PARAMETER DryRun
+ If set, the script will not perform the installation; instead it displays the command line to use to consistently install
+ the currently requested version of dotnet cli. For example, if you specify version 'latest' it will display a link
+ with the specific version so that this command can be used deterministically in a build script.
+ It also displays binaries location if you prefer to install or download it yourself.
+.PARAMETER NoPath
+ By default this script will set environment variable PATH for the current process to the binaries folder inside installation folder.
+ If set it will display binaries location but not set any environment variable.
+.PARAMETER Verbose
+ Displays diagnostics information.
+.PARAMETER AzureFeed
+ Default: https://dotnetcli.azureedge.net/dotnet
+ This parameter typically is not changed by the user.
+ It allows changing the URL for the Azure feed used by this installer.
+.PARAMETER UncachedFeed
+ This parameter typically is not changed by the user.
+ It allows changing the URL for the Uncached feed used by this installer.
+.PARAMETER FeedCredential
+ Used as a query string to append to the Azure feed.
+ It allows changing the URL to use non-public blob storage accounts.
+.PARAMETER ProxyAddress
+ If set, the installer will use the proxy when making web requests
+.PARAMETER ProxyUseDefaultCredentials
+ Default: false
+ Use default credentials, when using proxy address.
+.PARAMETER ProxyBypassList
+ If set with ProxyAddress, will provide the list of comma separated urls that will bypass the proxy
+.PARAMETER SkipNonVersionedFiles
+ Default: false
+ Skips installing non-versioned files if they already exist, such as dotnet.exe.
+.PARAMETER NoCdn
+ Disable downloading from the Azure CDN, and use the uncached feed directly.
+.PARAMETER JSonFile
+ Determines the SDK version from a user specified global.json file
+ Note: global.json must have a value for 'SDK:Version'
+#>
+[cmdletbinding()]
+param(
+ [string]$Channel="LTS",
+ [string]$Version="Latest",
+ [string]$JSonFile,
+ [string]$InstallDir="<auto>",
+ [string]$Architecture="<auto>",
+ [ValidateSet("dotnet", "aspnetcore", "windowsdesktop", IgnoreCase = $false)]
+ [string]$Runtime,
+ [Obsolete("This parameter may be removed in a future version of this script. The recommended alternative is '-Runtime dotnet'.")]
+ [switch]$SharedRuntime,
+ [switch]$DryRun,
+ [switch]$NoPath,
+ [string]$AzureFeed="https://dotnetcli.azureedge.net/dotnet",
+ [string]$UncachedFeed="https://dotnetcli.blob.core.windows.net/dotnet",
+ [string]$FeedCredential,
+ [string]$ProxyAddress,
+ [switch]$ProxyUseDefaultCredentials,
+ [string[]]$ProxyBypassList=@(),
+ [switch]$SkipNonVersionedFiles,
+ [switch]$NoCdn
+)
+
+Set-StrictMode -Version Latest
+$ErrorActionPreference="Stop"
+$ProgressPreference="SilentlyContinue"
+
+if ($NoCdn) {
+ $AzureFeed = $UncachedFeed
+}
+
+$BinFolderRelativePath=""
+
+if ($SharedRuntime -and (-not $Runtime)) {
+ $Runtime = "dotnet"
+}
+
+# example path with regex: shared/1.0.0-beta-12345/somepath
+$VersionRegEx="/\d+\.\d+[^/]+/"
+$OverrideNonVersionedFiles = !$SkipNonVersionedFiles
+
+function Say($str) {
+ try
+ {
+ Write-Host "dotnet-install: $str"
+ }
+ catch
+ {
+ # Some platforms cannot utilize Write-Host (Azure Functions, for instance). Fall back to Write-Output
+ Write-Output "dotnet-install: $str"
+ }
+}
+
+function Say-Verbose($str) {
+ try
+ {
+ Write-Verbose "dotnet-install: $str"
+ }
+ catch
+ {
+ # Some platforms cannot utilize Write-Verbose (Azure Functions, for instance). Fall back to Write-Output
+ Write-Output "dotnet-install: $str"
+ }
+}
+
+function Say-Invocation($Invocation) {
+ $command = $Invocation.MyCommand;
+ $args = (($Invocation.BoundParameters.Keys | foreach { "-$_ `"$($Invocation.BoundParameters[$_])`"" }) -join " ")
+ Say-Verbose "$command $args"
+}
+
+function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [int]$SecondsBetweenAttempts = 1) {
+ $Attempts = 0
+
+ while ($true) {
+ try {
+ return $ScriptBlock.Invoke()
+ }
+ catch {
+ $Attempts++
+ if ($Attempts -lt $MaxAttempts) {
+ Start-Sleep $SecondsBetweenAttempts
+ }
+ else {
+ throw
+ }
+ }
+ }
+}
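+# Usage sketch (hypothetical script block): Invoke-With-Retry({ GetHTTPResponse -Uri $SomeUri })
+# invokes the block up to 3 times by default, sleeping 1 second between attempts and rethrowing
+# the last exception once all attempts are exhausted.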
+
+function Get-Machine-Architecture() {
+ Say-Invocation $MyInvocation
+
+ # On PS x86, PROCESSOR_ARCHITECTURE reports x86 even on x64 systems.
+ # To get the correct architecture, we need to use PROCESSOR_ARCHITEW6432.
+ # PS x64 doesn't define this, so we fall back to PROCESSOR_ARCHITECTURE.
+ # Possible values: amd64, x64, x86, arm64, arm
+
+ if( $ENV:PROCESSOR_ARCHITEW6432 -ne $null )
+ {
+ return $ENV:PROCESSOR_ARCHITEW6432
+ }
+
+ return $ENV:PROCESSOR_ARCHITECTURE
+}
+
+function Get-CLIArchitecture-From-Architecture([string]$Architecture) {
+ Say-Invocation $MyInvocation
+
+ switch ($Architecture.ToLower()) {
+ { $_ -eq "" } { return Get-CLIArchitecture-From-Architecture $(Get-Machine-Architecture) }
+ { ($_ -eq "amd64") -or ($_ -eq "x64") } { return "x64" }
+ { $_ -eq "x86" } { return "x86" }
+ { $_ -eq "arm" } { return "arm" }
+ { $_ -eq "arm64" } { return "arm64" }
+ default { throw "Architecture not supported. If you think this is a bug, report it at https://github.com/dotnet/sdk/issues" }
+ }
+}
+
+# The version text returned from the feeds is a 1-line or 2-line string:
+# For the SDK and the dotnet runtime (2 lines):
+# Line 1: # commit_hash
+# Line 2: # 4-part version
+# For the aspnetcore runtime (1 line):
+# Line 1: # 4-part version
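+# For example, a hypothetical two-line response of "abc123" followed by "2.1.300" would be
+# parsed below into @{ CommitHash = 'abc123'; Version = '2.1.300' }.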
+function Get-Version-Info-From-Version-Text([string]$VersionText) {
+ Say-Invocation $MyInvocation
+
+ $Data = -split $VersionText
+
+ $VersionInfo = @{
+ CommitHash = $(if ($Data.Count -gt 1) { $Data[0] })
+ Version = $Data[-1] # last line is always the version number.
+ }
+ return $VersionInfo
+}
+
+function Load-Assembly([string] $Assembly) {
+ try {
+ Add-Type -Assembly $Assembly | Out-Null
+ }
+ catch {
+ # On Nano Server, PowerShell Core Edition is used. Add-Type is unable to resolve base class assemblies because they are not GAC'd.
+ # Loading the base class assemblies is not necessary as the types will automatically get resolved.
+ }
+}
+
+function GetHTTPResponse([Uri] $Uri)
+{
+ Invoke-With-Retry(
+ {
+
+ $HttpClient = $null
+
+ try {
+ # HttpClient is used vs Invoke-WebRequest in order to support Nano Server which doesn't support the Invoke-WebRequest cmdlet.
+ Load-Assembly -Assembly System.Net.Http
+
+ if(-not $ProxyAddress) {
+ try {
+ # Despite no proxy being explicitly specified, we may still be behind a default proxy
+ $DefaultProxy = [System.Net.WebRequest]::DefaultWebProxy;
+ if($DefaultProxy -and (-not $DefaultProxy.IsBypassed($Uri))) {
+ $ProxyAddress = $DefaultProxy.GetProxy($Uri).OriginalString
+ $ProxyUseDefaultCredentials = $true
+ }
+ } catch {
+ # Eat the exception and move forward as the above code is an attempt
+ # at resolving the DefaultProxy that may not have been a problem.
+ $ProxyAddress = $null
+ Say-Verbose("Exception ignored: $_.Exception.Message - moving forward...")
+ }
+ }
+
+ if($ProxyAddress) {
+ $HttpClientHandler = New-Object System.Net.Http.HttpClientHandler
+ $HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{
+ Address=$ProxyAddress;
+ UseDefaultCredentials=$ProxyUseDefaultCredentials;
+ BypassList = $ProxyBypassList;
+ }
+ $HttpClient = New-Object System.Net.Http.HttpClient -ArgumentList $HttpClientHandler
+ }
+ else {
+
+ $HttpClient = New-Object System.Net.Http.HttpClient
+ }
+ # Default timeout for HttpClient is 100s. For a 50 MB download this assumes a 500 KB/s average; anything slower would time out.
+ # A 20 minute timeout allows the download to work over much slower connections.
+ $HttpClient.Timeout = New-TimeSpan -Minutes 20
+ $Response = $HttpClient.GetAsync("${Uri}${FeedCredential}").Result
+ if (($Response -eq $null) -or (-not ($Response.IsSuccessStatusCode))) {
+ # The feed credential is potentially sensitive info. Do not log FeedCredential to console output.
+ $ErrorMsg = "Failed to download $Uri."
+ if ($Response -ne $null) {
+ $ErrorMsg += " $Response"
+ }
+
+ throw $ErrorMsg
+ }
+
+ return $Response
+ }
+ finally {
+ if ($HttpClient -ne $null) {
+ $HttpClient.Dispose()
+ }
+ }
+ })
+}
+
+function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel, [bool]$Coherent) {
+ Say-Invocation $MyInvocation
+
+ $VersionFileUrl = $null
+ if ($Runtime -eq "dotnet") {
+ $VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version"
+ }
+ elseif ($Runtime -eq "aspnetcore") {
+ $VersionFileUrl = "$UncachedFeed/aspnetcore/Runtime/$Channel/latest.version"
+ }
+ # Currently, the WindowsDesktop runtime is manufactured with the .NET Core runtime
+ elseif ($Runtime -eq "windowsdesktop") {
+ $VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version"
+ }
+ elseif (-not $Runtime) {
+ if ($Coherent) {
+ $VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.coherent.version"
+ }
+ else {
+ $VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.version"
+ }
+ }
+ else {
+ throw "Invalid value for `$Runtime"
+ }
+ try {
+ $Response = GetHTTPResponse -Uri $VersionFileUrl
+ }
+ catch {
+ throw "Could not resolve version information."
+ }
+ $StringContent = $Response.Content.ReadAsStringAsync().Result
+
+ switch ($Response.Content.Headers.ContentType) {
+ { ($_ -eq "application/octet-stream") } { $VersionText = $StringContent }
+ { ($_ -eq "text/plain") } { $VersionText = $StringContent }
+ { ($_ -eq "text/plain; charset=UTF-8") } { $VersionText = $StringContent }
+ default { throw "``$Response.Content.Headers.ContentType`` is an unknown .version file content type." }
+ }
+
+ $VersionInfo = Get-Version-Info-From-Version-Text $VersionText
+
+ return $VersionInfo
+}
+
+function Parse-Jsonfile-For-Version([string]$JSonFile) {
+ Say-Invocation $MyInvocation
+
+ If (-Not (Test-Path $JSonFile)) {
+ throw "Unable to find '$JSonFile'"
+ }
+ try {
+ $JSonContent = Get-Content($JSonFile) -Raw | ConvertFrom-Json | Select-Object -expand "sdk" -ErrorAction SilentlyContinue
+ }
+ catch {
+ throw "Json file unreadable: '$JSonFile'"
+ }
+ if ($JSonContent) {
+ try {
+ $JSonContent.PSObject.Properties | ForEach-Object {
+ $PropertyName = $_.Name
+ if ($PropertyName -eq "version") {
+ $Version = $_.Value
+ Say-Verbose "Version = $Version"
+ }
+ }
+ }
+ catch {
+ throw "Unable to parse the SDK node in '$JSonFile'"
+ }
+ }
+ else {
+ throw "Unable to find the SDK node in '$JSonFile'"
+ }
+ If ($Version -eq $null) {
+ throw "Unable to find the SDK:version node in '$JSonFile'"
+ }
+ return $Version
+}
+
+function Get-Specific-Version-From-Version([string]$AzureFeed, [string]$Channel, [string]$Version, [string]$JSonFile) {
+ Say-Invocation $MyInvocation
+
+ if (-not $JSonFile) {
+ switch ($Version.ToLower()) {
+ { $_ -eq "latest" } {
+ $LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $False
+ return $LatestVersionInfo.Version
+ }
+ { $_ -eq "coherent" } {
+ $LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $True
+ return $LatestVersionInfo.Version
+ }
+ default { return $Version }
+ }
+ }
+ else {
+ return Parse-Jsonfile-For-Version $JSonFile
+ }
+}
+
+function Get-Download-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) {
+ Say-Invocation $MyInvocation
+
+ # If anything fails in this lookup it will default to $SpecificVersion
+ $SpecificProductVersion = Get-Product-Version -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion
+
+ if ($Runtime -eq "dotnet") {
+ $PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/dotnet-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
+ }
+ elseif ($Runtime -eq "aspnetcore") {
+ $PayloadURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/aspnetcore-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
+ }
+ elseif ($Runtime -eq "windowsdesktop") {
+ $PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/windowsdesktop-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
+ }
+ elseif (-not $Runtime) {
+ $PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-sdk-$SpecificProductVersion-win-$CLIArchitecture.zip"
+ }
+ else {
+ throw "Invalid value for `$Runtime"
+ }
+
+ Say-Verbose "Constructed primary named payload URL: $PayloadURL"
+
+ return $PayloadURL, $SpecificProductVersion
+}
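+# For example (hypothetical values), with no -Runtime specified, version 3.1.402 and x64 architecture
+# this constructs "$AzureFeed/Sdk/3.1.402/dotnet-sdk-3.1.402-win-x64.zip", assuming the product
+# version resolves to the same value as the specific version.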
+
+function Get-LegacyDownload-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) {
+ Say-Invocation $MyInvocation
+
+ if (-not $Runtime) {
+ $PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-dev-win-$CLIArchitecture.$SpecificVersion.zip"
+ }
+ elseif ($Runtime -eq "dotnet") {
+ $PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/dotnet-win-$CLIArchitecture.$SpecificVersion.zip"
+ }
+ else {
+ return $null
+ }
+
+ Say-Verbose "Constructed legacy named payload URL: $PayloadURL"
+
+ return $PayloadURL
+}
+
+function Get-Product-Version([string]$AzureFeed, [string]$SpecificVersion) {
+ Say-Invocation $MyInvocation
+
+ if ($Runtime -eq "dotnet") {
+ $ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt"
+ }
+ elseif ($Runtime -eq "aspnetcore") {
+ $ProductVersionTxtURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/productVersion.txt"
+ }
+ elseif ($Runtime -eq "windowsdesktop") {
+ $ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt"
+ }
+ elseif (-not $Runtime) {
+ $ProductVersionTxtURL = "$AzureFeed/Sdk/$SpecificVersion/productVersion.txt"
+ }
+ else {
+ throw "Invalid value specified for `$Runtime"
+ }
+
+ Say-Verbose "Checking for existence of $ProductVersionTxtURL"
+
+ try {
+ $productVersionResponse = GetHTTPResponse($productVersionTxtUrl)
+
+ if ($productVersionResponse.StatusCode -eq 200) {
+ $productVersion = $productVersionResponse.Content.ReadAsStringAsync().Result.Trim()
+ if ($productVersion -ne $SpecificVersion)
+ {
+ Say "Using alternate version $productVersion found in $ProductVersionTxtURL"
+ }
+
+ return $productVersion
+ }
+ else {
+ Say-Verbose "Got StatusCode $($productVersionResponse.StatusCode) trying to get productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion"
+ $productVersion = $SpecificVersion
+ }
+ } catch {
+ Say-Verbose "Could not read productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion"
+ $productVersion = $SpecificVersion
+ }
+
+ return $productVersion
+}
+
+function Get-User-Share-Path() {
+ Say-Invocation $MyInvocation
+
+ $InstallRoot = $env:DOTNET_INSTALL_DIR
+ if (!$InstallRoot) {
+ $InstallRoot = "$env:LocalAppData\Microsoft\dotnet"
+ }
+ return $InstallRoot
+}
+
+function Resolve-Installation-Path([string]$InstallDir) {
+ Say-Invocation $MyInvocation
+
+ if ($InstallDir -eq "<auto>") {
+ return Get-User-Share-Path
+ }
+ return $InstallDir
+}
+
+function Is-Dotnet-Package-Installed([string]$InstallRoot, [string]$RelativePathToPackage, [string]$SpecificVersion) {
+ Say-Invocation $MyInvocation
+
+ $DotnetPackagePath = Join-Path -Path $InstallRoot -ChildPath $RelativePathToPackage | Join-Path -ChildPath $SpecificVersion
+ Say-Verbose "Is-Dotnet-Package-Installed: DotnetPackagePath=$DotnetPackagePath"
+ return Test-Path $DotnetPackagePath -PathType Container
+}
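+# For example (hypothetical values), -RelativePathToPackage "sdk" -SpecificVersion "3.1.402" checks
+# whether the directory "$InstallRoot\sdk\3.1.402" already exists.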
+
+function Get-Absolute-Path([string]$RelativeOrAbsolutePath) {
+ # Too much spam
+ # Say-Invocation $MyInvocation
+
+ return $ExecutionContext.SessionState.Path.GetUnresolvedProviderPathFromPSPath($RelativeOrAbsolutePath)
+}
+
+function Get-Path-Prefix-With-Version($path) {
+ $match = [regex]::match($path, $VersionRegEx)
+ if ($match.Success) {
+ return $path.Substring(0, $match.Index + $match.Length)
+ }
+
+ return $null
+}
+
+function Get-List-Of-Directories-And-Versions-To-Unpack-From-Dotnet-Package([System.IO.Compression.ZipArchive]$Zip, [string]$OutPath) {
+ Say-Invocation $MyInvocation
+
+ $ret = @()
+ foreach ($entry in $Zip.Entries) {
+ $dir = Get-Path-Prefix-With-Version $entry.FullName
+ if ($dir -ne $null) {
+ $path = Get-Absolute-Path $(Join-Path -Path $OutPath -ChildPath $dir)
+ if (-Not (Test-Path $path -PathType Container)) {
+ $ret += $dir
+ }
+ }
+ }
+
+ $ret = $ret | Sort-Object | Get-Unique
+
+ $values = ($ret | foreach { "$_" }) -join ";"
+ Say-Verbose "Directories to unpack: $values"
+
+ return $ret
+}
+
+# Example zip content and extraction algorithm:
+# Rule: files, if extracted, are always extracted to the same relative path locally
+# .\
+# a.exe # file does not exist locally, extract
+# b.dll # file exists locally, override only if $OverrideFiles set
+# aaa\ # same rules as for files
+# ...
+# abc\1.0.0\ # directory contains version and exists locally
+# ... # do not extract content under versioned part
+# abc\asd\ # same rules as for files
+# ...
+# def\ghi\1.0.1\ # directory contains version and does not exist locally
+# ... # extract content
+function Extract-Dotnet-Package([string]$ZipPath, [string]$OutPath) {
+ Say-Invocation $MyInvocation
+
+ Load-Assembly -Assembly System.IO.Compression.FileSystem
+ Set-Variable -Name Zip
+ try {
+ $Zip = [System.IO.Compression.ZipFile]::OpenRead($ZipPath)
+
+ $DirectoriesToUnpack = Get-List-Of-Directories-And-Versions-To-Unpack-From-Dotnet-Package -Zip $Zip -OutPath $OutPath
+
+ foreach ($entry in $Zip.Entries) {
+ $PathWithVersion = Get-Path-Prefix-With-Version $entry.FullName
+ if (($PathWithVersion -eq $null) -Or ($DirectoriesToUnpack -contains $PathWithVersion)) {
+ $DestinationPath = Get-Absolute-Path $(Join-Path -Path $OutPath -ChildPath $entry.FullName)
+ $DestinationDir = Split-Path -Parent $DestinationPath
+ $OverrideFiles=$OverrideNonVersionedFiles -Or (-Not (Test-Path $DestinationPath))
+ if ((-Not $DestinationPath.EndsWith("\")) -And $OverrideFiles) {
+ New-Item -ItemType Directory -Force -Path $DestinationDir | Out-Null
+ [System.IO.Compression.ZipFileExtensions]::ExtractToFile($entry, $DestinationPath, $OverrideNonVersionedFiles)
+ }
+ }
+ }
+ }
+ finally {
+ if ($Zip -ne $null) {
+ $Zip.Dispose()
+ }
+ }
+}
+
+function DownloadFile($Source, [string]$OutPath) {
+ if ($Source -notlike "http*") {
+ # Using System.IO.Path.GetFullPath to get the current directory
+ # does not work in this context - $pwd gives the current directory
+ if (![System.IO.Path]::IsPathRooted($Source)) {
+ $Source = $(Join-Path -Path $pwd -ChildPath $Source)
+ }
+ $Source = Get-Absolute-Path $Source
+ Say "Copying file from $Source to $OutPath"
+ Copy-Item $Source $OutPath
+ return
+ }
+
+ $Stream = $null
+
+ try {
+ $Response = GetHTTPResponse -Uri $Source
+ $Stream = $Response.Content.ReadAsStreamAsync().Result
+ $File = [System.IO.File]::Create($OutPath)
+ $Stream.CopyTo($File)
+ $File.Close()
+ }
+ finally {
+ if ($Stream -ne $null) {
+ $Stream.Dispose()
+ }
+ }
+}
+
+function Prepend-Sdk-InstallRoot-To-Path([string]$InstallRoot, [string]$BinFolderRelativePath) {
+ $BinPath = Get-Absolute-Path $(Join-Path -Path $InstallRoot -ChildPath $BinFolderRelativePath)
+ if (-Not $NoPath) {
+ $SuffixedBinPath = "$BinPath;"
+ if (-Not $env:path.Contains($SuffixedBinPath)) {
+ Say "Adding to current process PATH: `"$BinPath`". Note: This change will not be visible if PowerShell was run as a child process."
+ $env:path = $SuffixedBinPath + $env:path
+ } else {
+ Say-Verbose "Current process PATH already contains `"$BinPath`""
+ }
+ }
+ else {
+ Say "Binaries of dotnet can be found in $BinPath"
+ }
+}
+
+$CLIArchitecture = Get-CLIArchitecture-From-Architecture $Architecture
+$SpecificVersion = Get-Specific-Version-From-Version -AzureFeed $AzureFeed -Channel $Channel -Version $Version -JSonFile $JSonFile
+$DownloadLink, $EffectiveVersion = Get-Download-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
+$LegacyDownloadLink = Get-LegacyDownload-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
+
+$InstallRoot = Resolve-Installation-Path $InstallDir
+Say-Verbose "InstallRoot: $InstallRoot"
+$ScriptName = $MyInvocation.MyCommand.Name
+
+if ($DryRun) {
+ Say "Payload URLs:"
+ Say "Primary named payload URL: $DownloadLink"
+ if ($LegacyDownloadLink) {
+ Say "Legacy named payload URL: $LegacyDownloadLink"
+ }
+ $RepeatableCommand = ".\$ScriptName -Version `"$SpecificVersion`" -InstallDir `"$InstallRoot`" -Architecture `"$CLIArchitecture`""
+ if ($Runtime -eq "dotnet") {
+ $RepeatableCommand+=" -Runtime `"dotnet`""
+ }
+ elseif ($Runtime -eq "aspnetcore") {
+ $RepeatableCommand+=" -Runtime `"aspnetcore`""
+ }
+ foreach ($key in $MyInvocation.BoundParameters.Keys) {
+ if (-not (@("Architecture","Channel","DryRun","InstallDir","Runtime","SharedRuntime","Version") -contains $key)) {
+ $RepeatableCommand+=" -$key `"$($MyInvocation.BoundParameters[$key])`""
+ }
+ }
+ Say "Repeatable invocation: $RepeatableCommand"
+ exit 0
+}
+
+if ($Runtime -eq "dotnet") {
+ $assetName = ".NET Core Runtime"
+ $dotnetPackageRelativePath = "shared\Microsoft.NETCore.App"
+}
+elseif ($Runtime -eq "aspnetcore") {
+ $assetName = "ASP.NET Core Runtime"
+ $dotnetPackageRelativePath = "shared\Microsoft.AspNetCore.App"
+}
+elseif ($Runtime -eq "windowsdesktop") {
+ $assetName = ".NET Core Windows Desktop Runtime"
+ $dotnetPackageRelativePath = "shared\Microsoft.WindowsDesktop.App"
+}
+elseif (-not $Runtime) {
+ $assetName = ".NET Core SDK"
+ $dotnetPackageRelativePath = "sdk"
+}
+else {
+ throw "Invalid value for `$Runtime"
+}
+
+if ($SpecificVersion -ne $EffectiveVersion)
+{
+ Say "Performing installation checks for effective version: $EffectiveVersion"
+ $SpecificVersion = $EffectiveVersion
+}
+
+# Check if the SDK version is already installed.
+$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion
+if ($isAssetInstalled) {
+ Say "$assetName version $SpecificVersion is already installed."
+ Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath
+ exit 0
+}
+
+New-Item -ItemType Directory -Force -Path $InstallRoot | Out-Null
+
+$installDrive = $((Get-Item $InstallRoot).PSDrive.Name);
+$diskInfo = Get-PSDrive -Name $installDrive
+if ($diskInfo.Free / 1MB -le 100) {
+ Say "There is not enough disk space on drive ${installDrive}:"
+ exit 0
+}
+
+$ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName())
+Say-Verbose "Zip path: $ZipPath"
+
+$DownloadFailed = $false
+Say "Downloading link: $DownloadLink"
+try {
+ DownloadFile -Source $DownloadLink -OutPath $ZipPath
+}
+catch {
+ Say "Cannot download: $DownloadLink"
+ if ($LegacyDownloadLink) {
+ $DownloadLink = $LegacyDownloadLink
+ $ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName())
+ Say-Verbose "Legacy zip path: $ZipPath"
+ Say "Downloading legacy link: $DownloadLink"
+ try {
+ DownloadFile -Source $DownloadLink -OutPath $ZipPath
+ }
+ catch {
+ Say "Cannot download: $DownloadLink"
+ $DownloadFailed = $true
+ }
+ }
+ else {
+ $DownloadFailed = $true
+ }
+}
+
+if ($DownloadFailed) {
+ throw "Could not find/download: `"$assetName`" with version = $SpecificVersion`nRefer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
+}
+
+Say "Extracting zip from $DownloadLink"
+Extract-Dotnet-Package -ZipPath $ZipPath -OutPath $InstallRoot
+
+# Check if the SDK version is installed; if not, fail the installation.
+$isAssetInstalled = $false
+
+# if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed.
+if ($SpecificVersion -Match "rtm" -or $SpecificVersion -Match "servicing") {
+ $ReleaseVersion = $SpecificVersion.Split("-")[0]
+ Say-Verbose "Checking installation: version = $ReleaseVersion"
+ $isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $ReleaseVersion
+}
+
+# Check if the SDK version is installed.
+if (!$isAssetInstalled) {
+ Say-Verbose "Checking installation: version = $SpecificVersion"
+ $isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion
+}
+
+if (!$isAssetInstalled) {
+ throw "`"$assetName`" with version = $SpecificVersion failed to install with an unknown error."
+}
+
+Remove-Item $ZipPath
+
+Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath
+
+Say "Installation finished"
+exit 0
\ No newline at end of file
diff --git a/eng/common/dotnet-install-scripts/dotnet-install.sh b/eng/common/dotnet-install-scripts/dotnet-install.sh
new file mode 100644
index 00000000000..92161141f6c
--- /dev/null
+++ b/eng/common/dotnet-install-scripts/dotnet-install.sh
@@ -0,0 +1,1133 @@
+#!/usr/bin/env bash
+# Copyright (c) .NET Foundation and contributors. All rights reserved.
+# Licensed under the MIT license. See LICENSE file in the project root for full license information.
+#
+
+# Stop script on NZEC
+set -e
+# Stop script if unbound variable found (use ${var:-} if intentional)
+set -u
+# By default cmd1 | cmd2 returns the exit code of cmd2 regardless of whether cmd1 succeeded;
+# enable pipefail so the pipeline fails when any command in it fails
+set -o pipefail
+
+# Use in the functions: eval $invocation
+invocation='say_verbose "Calling: ${yellow:-}${FUNCNAME[0]} ${green:-}$*${normal:-}"'
+
+# standard output may be used as a return value in the functions
+# we need a way to write text on the screen in the functions so that
+# it won't interfere with the return value.
+# Exposing stream 3 as a pipe to standard output of the script itself
+exec 3>&1
+
+# Setup some colors to use. These need to work in fairly limited shells, like the Ubuntu Docker container where there are only 8 colors.
+# See if stdout is a terminal
+if [ -t 1 ] && command -v tput > /dev/null; then
+ # see if it supports colors
+ ncolors=$(tput colors)
+ if [ -n "$ncolors" ] && [ $ncolors -ge 8 ]; then
+ bold="$(tput bold || echo)"
+ normal="$(tput sgr0 || echo)"
+ black="$(tput setaf 0 || echo)"
+ red="$(tput setaf 1 || echo)"
+ green="$(tput setaf 2 || echo)"
+ yellow="$(tput setaf 3 || echo)"
+ blue="$(tput setaf 4 || echo)"
+ magenta="$(tput setaf 5 || echo)"
+ cyan="$(tput setaf 6 || echo)"
+ white="$(tput setaf 7 || echo)"
+ fi
+fi
+
+say_warning() {
+ printf "%b\n" "${yellow:-}dotnet_install: Warning: $1${normal:-}"
+}
+
+say_err() {
+ printf "%b\n" "${red:-}dotnet_install: Error: $1${normal:-}" >&2
+}
+
+say() {
+ # using stream 3 (defined in the beginning) to not interfere with stdout of functions
+ # which may be used as return value
+ printf "%b\n" "${cyan:-}dotnet-install:${normal:-} $1" >&3
+}
+
+say_verbose() {
+ if [ "$verbose" = true ]; then
+ say "$1"
+ fi
+}
+
+# This platform list is finite - if the SDK/Runtime has supported Linux distribution-specific assets,
+# then and only then should the Linux distribution appear in this list.
+# Adding a Linux distribution to this list does not imply distribution-specific support.
+get_legacy_os_name_from_platform() {
+ eval $invocation
+
+ platform="$1"
+ case "$platform" in
+ "centos.7")
+ echo "centos"
+ return 0
+ ;;
+ "debian.8")
+ echo "debian"
+ return 0
+ ;;
+ "debian.9")
+ echo "debian.9"
+ return 0
+ ;;
+ "fedora.23")
+ echo "fedora.23"
+ return 0
+ ;;
+ "fedora.24")
+ echo "fedora.24"
+ return 0
+ ;;
+ "fedora.27")
+ echo "fedora.27"
+ return 0
+ ;;
+ "fedora.28")
+ echo "fedora.28"
+ return 0
+ ;;
+ "opensuse.13.2")
+ echo "opensuse.13.2"
+ return 0
+ ;;
+ "opensuse.42.1")
+ echo "opensuse.42.1"
+ return 0
+ ;;
+ "opensuse.42.3")
+ echo "opensuse.42.3"
+ return 0
+ ;;
+ "rhel.7"*)
+ echo "rhel"
+ return 0
+ ;;
+ "ubuntu.14.04")
+ echo "ubuntu"
+ return 0
+ ;;
+ "ubuntu.16.04")
+ echo "ubuntu.16.04"
+ return 0
+ ;;
+ "ubuntu.16.10")
+ echo "ubuntu.16.10"
+ return 0
+ ;;
+ "ubuntu.18.04")
+ echo "ubuntu.18.04"
+ return 0
+ ;;
+ "alpine.3.4.3")
+ echo "alpine"
+ return 0
+ ;;
+ esac
+ return 1
+}
+
+get_linux_platform_name() {
+ eval $invocation
+
+ if [ -n "$runtime_id" ]; then
+ echo "${runtime_id%-*}"
+ return 0
+ else
+ if [ -e /etc/os-release ]; then
+ . /etc/os-release
+ echo "$ID${VERSION_ID:+.${VERSION_ID}}"
+ return 0
+ elif [ -e /etc/redhat-release ]; then
+ local redhatRelease=$(</etc/redhat-release)
+ if [[ $redhatRelease == "CentOS release 6."* || $redhatRelease == "Red Hat Enterprise Linux "*" release 6."* ]]; then
+ echo "rhel.6"
+ return 0
+ fi
+ fi
+ fi
+
+ say_verbose "Linux specific platform name and version could not be detected: UName = $uname"
+ return 1
+}
+
+is_musl_based_distro() {
+ (ldd --version 2>&1 || true) | grep -q musl
+}
+
+get_current_os_name() {
+ eval $invocation
+
+ local uname=$(uname)
+ if [ "$uname" = "Darwin" ]; then
+ echo "osx"
+ return 0
+ elif [ "$uname" = "FreeBSD" ]; then
+ echo "freebsd"
+ return 0
+ elif [ "$uname" = "Linux" ]; then
+ local linux_platform_name
+ linux_platform_name="$(get_linux_platform_name)" || { echo "linux" && return 0 ; }
+
+ if [ "$linux_platform_name" = "rhel.6" ]; then
+ echo $linux_platform_name
+ return 0
+ elif is_musl_based_distro; then
+ echo "linux-musl"
+ return 0
+ else
+ echo "linux"
+ return 0
+ fi
+ fi
+
+ say_err "OS name could not be detected: UName = $uname"
+ return 1
+}
+
+get_legacy_os_name() {
+ eval $invocation
+
+ local uname=$(uname)
+ if [ "$uname" = "Darwin" ]; then
+ echo "osx"
+ return 0
+ elif [ -n "$runtime_id" ]; then
+ echo $(get_legacy_os_name_from_platform "${runtime_id%-*}" || echo "${runtime_id%-*}")
+ return 0
+ else
+ if [ -e /etc/os-release ]; then
+ . /etc/os-release
+ os=$(get_legacy_os_name_from_platform "$ID${VERSION_ID:+.${VERSION_ID}}" || echo "")
+ if [ -n "$os" ]; then
+ echo "$os"
+ return 0
+ fi
+ fi
+ fi
+
+ say_verbose "Distribution specific OS name and version could not be detected: UName = $uname"
+ return 1
+}
+
+machine_has() {
+ eval $invocation
+
+ hash "$1" > /dev/null 2>&1
+ return $?
+}
+
+
+check_min_reqs() {
+ local hasMinimum=false
+ if machine_has "curl"; then
+ hasMinimum=true
+ elif machine_has "wget"; then
+ hasMinimum=true
+ fi
+
+ if [ "$hasMinimum" = "false" ]; then
+ say_err "curl (recommended) or wget are required to download dotnet. Install missing prerequisite to proceed."
+ return 1
+ fi
+ return 0
+}
+
+check_pre_reqs() {
+ eval $invocation
+
+ if [ "${DOTNET_INSTALL_SKIP_PREREQS:-}" = "1" ]; then
+ return 0
+ fi
+
+ if [ "$(uname)" = "Linux" ]; then
+ if is_musl_based_distro; then
+ if ! command -v scanelf > /dev/null; then
+ say_warning "scanelf not found, please install pax-utils package."
+ return 0
+ fi
+ LDCONFIG_COMMAND="scanelf --ldpath -BF '%f'"
+ [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libintl)" ] && say_warning "Unable to locate libintl. Probable prerequisite missing; install libintl (or gettext)."
+ else
+ if [ ! -x "$(command -v ldconfig)" ]; then
+ say_verbose "ldconfig is not in PATH, trying /sbin/ldconfig."
+ LDCONFIG_COMMAND="/sbin/ldconfig"
+ else
+ LDCONFIG_COMMAND="ldconfig"
+ fi
+ local librarypath=${LD_LIBRARY_PATH:-}
+ LDCONFIG_COMMAND="$LDCONFIG_COMMAND -NXv ${librarypath//:/ }"
+ fi
+
+ [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep zlib)" ] && say_warning "Unable to locate zlib. Probable prerequisite missing; install zlib."
+ [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep ssl)" ] && say_warning "Unable to locate libssl. Probable prerequisite missing; install libssl."
+ [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libicu)" ] && say_warning "Unable to locate libicu. Probable prerequisite missing; install libicu."
+ [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep lttng)" ] && say_warning "Unable to locate liblttng. Probable prerequisite missing; install libcurl."
+ [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libcurl)" ] && say_warning "Unable to locate libcurl. Probable prerequisite missing; install libcurl."
+ fi
+
+ return 0
+}
+
+# args:
+# input - $1
+to_lowercase() {
+ #eval $invocation
+
+ echo "$1" | tr '[:upper:]' '[:lower:]'
+ return 0
+}
+
+# args:
+# input - $1
+remove_trailing_slash() {
+ #eval $invocation
+
+ local input="${1:-}"
+ echo "${input%/}"
+ return 0
+}
+
+# args:
+# input - $1
+remove_beginning_slash() {
+ #eval $invocation
+
+ local input="${1:-}"
+ echo "${input#/}"
+ return 0
+}
+
+# args:
+# root_path - $1
+# child_path - $2 - this parameter can be empty
+combine_paths() {
+ eval $invocation
+
+ # TODO: Consider making it work with any number of paths. For now:
+ if [ ! -z "${3:-}" ]; then
+ say_err "combine_paths: Function takes two parameters."
+ return 1
+ fi
+
+ local root_path="$(remove_trailing_slash "$1")"
+ local child_path="$(remove_beginning_slash "${2:-}")"
+ say_verbose "combine_paths: root_path=$root_path"
+ say_verbose "combine_paths: child_path=$child_path"
+ echo "$root_path/$child_path"
+ return 0
+}
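+# e.g. (hypothetical paths) combine_paths "/usr/local/" "/bin" prints "/usr/local/bin".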
+
+get_machine_architecture() {
+ eval $invocation
+
+ if command -v uname > /dev/null; then
+ CPUName=$(uname -m)
+ case $CPUName in
+ armv7l)
+ echo "arm"
+ return 0
+ ;;
+ aarch64)
+ echo "arm64"
+ return 0
+ ;;
+ esac
+ fi
+
+ # Always default to 'x64'
+ echo "x64"
+ return 0
+}
+
+# args:
+# architecture - $1
+get_normalized_architecture_from_architecture() {
+ eval $invocation
+
+ local architecture="$(to_lowercase "$1")"
+ case "$architecture" in
+ \<auto\>)
+ echo "$(get_normalized_architecture_from_architecture "$(get_machine_architecture)")"
+ return 0
+ ;;
+ amd64|x64)
+ echo "x64"
+ return 0
+ ;;
+ arm)
+ echo "arm"
+ return 0
+ ;;
+ arm64)
+ echo "arm64"
+ return 0
+ ;;
+ esac
+
+ say_err "Architecture \`$architecture\` not supported. If you think this is a bug, report it at https://github.com/dotnet/sdk/issues"
+ return 1
+}
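+# e.g. (hypothetical input) "AMD64" normalizes to "x64"; unrecognized values are rejected with an error.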
+
+# The version text returned from the feeds is a 1-line or 2-line string:
+# For the SDK and the dotnet runtime (2 lines):
+# Line 1: # commit_hash
+# Line 2: # 4-part version
+# For the aspnetcore runtime (1 line):
+# Line 1: # 4-part version
+
+# args:
+# version_text - stdin
+get_version_from_version_info() {
+ eval $invocation
+
+ cat | tail -n 1 | sed 's/\r$//'
+ return 0
+}
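+# e.g. a two-line SDK response (commit hash, then version) yields only its last line, the version
+# number, with any trailing carriage return stripped.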
+
+# args:
+# install_root - $1
+# relative_path_to_package - $2
+# specific_version - $3
+is_dotnet_package_installed() {
+ eval $invocation
+
+ local install_root="$1"
+ local relative_path_to_package="$2"
+ local specific_version="${3//[$'\t\r\n']}"
+
+ local dotnet_package_path="$(combine_paths "$(combine_paths "$install_root" "$relative_path_to_package")" "$specific_version")"
+ say_verbose "is_dotnet_package_installed: dotnet_package_path=$dotnet_package_path"
+
+ if [ -d "$dotnet_package_path" ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+# args:
+# azure_feed - $1
+# channel - $2
+# normalized_architecture - $3
+# coherent - $4
+get_latest_version_info() {
+ eval $invocation
+
+ local azure_feed="$1"
+ local channel="$2"
+ local normalized_architecture="$3"
+ local coherent="$4"
+
+ local version_file_url=null
+ if [[ "$runtime" == "dotnet" ]]; then
+ version_file_url="$uncached_feed/Runtime/$channel/latest.version"
+ elif [[ "$runtime" == "aspnetcore" ]]; then
+ version_file_url="$uncached_feed/aspnetcore/Runtime/$channel/latest.version"
+ elif [ -z "$runtime" ]; then
+ if [ "$coherent" = true ]; then
+ version_file_url="$uncached_feed/Sdk/$channel/latest.coherent.version"
+ else
+ version_file_url="$uncached_feed/Sdk/$channel/latest.version"
+ fi
+ else
+ say_err "Invalid value for \$runtime"
+ return 1
+ fi
+ say_verbose "get_latest_version_info: latest url: $version_file_url"
+
+ download "$version_file_url"
+ return $?
+}
+
+# args:
+# json_file - $1
+parse_jsonfile_for_version() {
+ eval $invocation
+
+ local json_file="$1"
+ if [ ! -f "$json_file" ]; then
+ say_err "Unable to find \`$json_file\`"
+ return 1
+ fi
+
+ sdk_section=$(cat $json_file | awk '/"sdk"/,/}/')
+ if [ -z "$sdk_section" ]; then
+ say_err "Unable to parse the SDK node in \`$json_file\`"
+ return 1
+ fi
+
+ sdk_list=$(echo $sdk_section | awk -F"[{}]" '{print $2}')
+ sdk_list=${sdk_list//[\" ]/}
+ sdk_list=${sdk_list//,/$'\n'}
+ sdk_list="$(echo -e "${sdk_list}" | tr -d '[[:space:]]')"
+
+ local version_info=""
+ while read -r line; do
+ IFS=:
+ while read -r key value; do
+ if [[ "$key" == "version" ]]; then
+ version_info=$value
+ fi
+ done <<< "$line"
+ done <<< "$sdk_list"
+ if [ -z "$version_info" ]; then
+ say_err "Unable to find the SDK:version node in \`$json_file\`"
+ return 1
+ fi
+
+ unset IFS;
+ echo "$version_info"
+ return 0
+}
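+# e.g. for a typical multi-line global.json whose sdk section contains "version": "3.1.402"
+# (hypothetical value), this function echoes 3.1.402; a missing sdk or version node is an error.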
+
+# args:
+# azure_feed - $1
+# channel - $2
+# normalized_architecture - $3
+# version - $4
+# json_file - $5
+get_specific_version_from_version() {
+ eval $invocation
+
+ local azure_feed="$1"
+ local channel="$2"
+ local normalized_architecture="$3"
+ local version="$(to_lowercase "$4")"
+ local json_file="$5"
+
+ if [ -z "$json_file" ]; then
+ case "$version" in
+ latest)
+ local version_info
+ version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" false)" || return 1
+ say_verbose "get_specific_version_from_version: version_info=$version_info"
+ echo "$version_info" | get_version_from_version_info
+ return 0
+ ;;
+ coherent)
+ local version_info
+ version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" true)" || return 1
+ say_verbose "get_specific_version_from_version: version_info=$version_info"
+ echo "$version_info" | get_version_from_version_info
+ return 0
+ ;;
+ *)
+ echo "$version"
+ return 0
+ ;;
+ esac
+ else
+ local version_info
+ version_info="$(parse_jsonfile_for_version "$json_file")" || return 1
+ echo "$version_info"
+ return 0
+ fi
+}
+
+# args:
+# azure_feed - $1
+# channel - $2
+# normalized_architecture - $3
+# specific_version - $4
+construct_download_link() {
+ eval $invocation
+
+ local azure_feed="$1"
+ local channel="$2"
+ local normalized_architecture="$3"
+ local specific_version="${4//[$'\t\r\n']}"
+ local specific_product_version="$(get_specific_product_version "$1" "$4")"
+
+ local osname
+ osname="$(get_current_os_name)" || return 1
+
+ local download_link=null
+ if [[ "$runtime" == "dotnet" ]]; then
+ download_link="$azure_feed/Runtime/$specific_version/dotnet-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz"
+ elif [[ "$runtime" == "aspnetcore" ]]; then
+ download_link="$azure_feed/aspnetcore/Runtime/$specific_version/aspnetcore-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz"
+ elif [ -z "$runtime" ]; then
+ download_link="$azure_feed/Sdk/$specific_version/dotnet-sdk-$specific_product_version-$osname-$normalized_architecture.tar.gz"
+ else
+ return 1
+ fi
+
+ echo "$download_link"
+ return 0
+}
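+# For example (hypothetical values), an SDK install of 3.1.402 on linux x64 constructs
+# "$azure_feed/Sdk/3.1.402/dotnet-sdk-3.1.402-linux-x64.tar.gz", assuming productVersion.txt
+# resolves to the same value as the specific version.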
+
+# args:
+# azure_feed - $1
+# specific_version - $2
+get_specific_product_version() {
+ # If we find a 'productVersion.txt' at the root of any folder, we'll use its contents
+ # to resolve the version of what's in the folder, superseding the specified version.
+ eval $invocation
+
+ local azure_feed="$1"
+ local specific_version="${2//[$'\t\r\n']}"
+ local specific_product_version=$specific_version
+
+ local download_link=null
+ if [[ "$runtime" == "dotnet" ]]; then
+ download_link="$azure_feed/Runtime/$specific_version/productVersion.txt${feed_credential}"
+ elif [[ "$runtime" == "aspnetcore" ]]; then
+ download_link="$azure_feed/aspnetcore/Runtime/$specific_version/productVersion.txt${feed_credential}"
+ elif [ -z "$runtime" ]; then
+ download_link="$azure_feed/Sdk/$specific_version/productVersion.txt${feed_credential}"
+ else
+ return 1
+ fi
+
+ specific_product_version=$(curl -s --fail "$download_link")
+ if [ $? -ne 0 ]
+ then
+ specific_product_version=$(wget -qO- "$download_link")
+ if [ $? -ne 0 ]
+ then
+ specific_product_version=$specific_version
+ fi
+ fi
+ specific_product_version="${specific_product_version//[$'\t\r\n']}"
+
+ echo "$specific_product_version"
+ return 0
+}
+
+# args:
+# azure_feed - $1
+# channel - $2
+# normalized_architecture - $3
+# specific_version - $4
+construct_legacy_download_link() {
+ eval $invocation
+
+ local azure_feed="$1"
+ local channel="$2"
+ local normalized_architecture="$3"
+ local specific_version="${4//[$'\t\r\n']}"
+
+ local distro_specific_osname
+ distro_specific_osname="$(get_legacy_os_name)" || return 1
+
+ local legacy_download_link=null
+ if [[ "$runtime" == "dotnet" ]]; then
+ legacy_download_link="$azure_feed/Runtime/$specific_version/dotnet-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz"
+ elif [ -z "$runtime" ]; then
+ legacy_download_link="$azure_feed/Sdk/$specific_version/dotnet-dev-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz"
+ else
+ return 1
+ fi
+
+ echo "$legacy_download_link"
+ return 0
+}
+
+get_user_install_path() {
+ eval $invocation
+
+ if [ ! -z "${DOTNET_INSTALL_DIR:-}" ]; then
+ echo "$DOTNET_INSTALL_DIR"
+ else
+ echo "$HOME/.dotnet"
+ fi
+ return 0
+}
+
+# args:
+# install_dir - $1
+resolve_installation_path() {
+ eval $invocation
+
+ local install_dir=$1
+ if [ "$install_dir" = "" ]; then
+ local user_install_path="$(get_user_install_path)"
+ say_verbose "resolve_installation_path: user_install_path=$user_install_path"
+ echo "$user_install_path"
+ return 0
+ fi
+
+ echo "$install_dir"
+ return 0
+}
+
+# args:
+# relative_or_absolute_path - $1
+get_absolute_path() {
+ eval $invocation
+
+ local relative_or_absolute_path=$1
+ echo "$(cd "$(dirname "$1")" && pwd -P)/$(basename "$1")"
+ return 0
+}
+
+# args:
+# input_files - stdin
+# root_path - $1
+# out_path - $2
+# override - $3
+copy_files_or_dirs_from_list() {
+ eval $invocation
+
+ local root_path="$(remove_trailing_slash "$1")"
+ local out_path="$(remove_trailing_slash "$2")"
+ local override="$3"
+ local osname="$(get_current_os_name)"
+ local override_switch=$(
+ if [ "$override" = false ]; then
+ if [ "$osname" = "linux-musl" ]; then
+ printf -- "-u";
+ else
+ printf -- "-n";
+ fi
+ fi)
+
+ cat | uniq | while read -r file_path; do
+ local path="$(remove_beginning_slash "${file_path#$root_path}")"
+ local target="$out_path/$path"
+ if [ "$override" = true ] || (! ([ -d "$target" ] || [ -e "$target" ])); then
+ mkdir -p "$out_path/$(dirname "$path")"
+ if [ -d "$target" ]; then
+ rm -rf "$target"
+ fi
+ cp -R $override_switch "$root_path/$path" "$target"
+ fi
+ done
+}
+
+# args:
+# zip_path - $1
+# out_path - $2
+extract_dotnet_package() {
+ eval $invocation
+
+ local zip_path="$1"
+ local out_path="$2"
+
+ local temp_out_path="$(mktemp -d "$temporary_file_template")"
+
+ local failed=false
+ tar -xzf "$zip_path" -C "$temp_out_path" > /dev/null || failed=true
+
+ local folders_with_version_regex='^.*/[0-9]+\.[0-9]+[^/]+/'
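+ # First pass: versioned folders (e.g. sdk/3.1.402/) are copied only when missing locally;
+ # second pass: non-versioned files are copied according to $override_non_versioned_files.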
+ find "$temp_out_path" -type f | grep -Eo "$folders_with_version_regex" | sort | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" false
+ find "$temp_out_path" -type f | grep -Ev "$folders_with_version_regex" | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" "$override_non_versioned_files"
+
+ rm -rf "$temp_out_path"
+
+ if [ "$failed" = true ]; then
+ say_err "Extraction failed"
+ return 1
+ fi
+}
+
+# args:
+# remote_path - $1
+# [out_path] - $2 - stdout if not provided
+download() {
+ eval $invocation
+
+ local remote_path="$1"
+ local out_path="${2:-}"
+
+ if [[ "$remote_path" != "http"* ]]; then
+ cp "$remote_path" "$out_path"
+ return $?
+ fi
+
+ local failed=false
+ if machine_has "curl"; then
+ downloadcurl "$remote_path" "$out_path" || failed=true
+ elif machine_has "wget"; then
+ downloadwget "$remote_path" "$out_path" || failed=true
+ else
+ failed=true
+ fi
+ if [ "$failed" = true ]; then
+ say_verbose "Download failed: $remote_path"
+ return 1
+ fi
+ return 0
+}
+
+downloadcurl() {
+ eval $invocation
+ local remote_path="$1"
+ local out_path="${2:-}"
+
+ # Append feed_credential as late as possible before calling curl to avoid logging feed_credential
+ remote_path="${remote_path}${feed_credential}"
+
+ local curl_options="--retry 20 --retry-delay 2 --connect-timeout 15 -sSL -f --create-dirs "
+ local failed=false
+ if [ -z "$out_path" ]; then
+ curl $curl_options "$remote_path" || failed=true
+ else
+ curl $curl_options -o "$out_path" "$remote_path" || failed=true
+ fi
+ if [ "$failed" = true ]; then
+ say_verbose "Curl download failed"
+ return 1
+ fi
+ return 0
+}
+
+downloadwget() {
+ eval $invocation
+ local remote_path="$1"
+ local out_path="${2:-}"
+
+ # Append feed_credential as late as possible before calling wget to avoid logging feed_credential
+ remote_path="${remote_path}${feed_credential}"
+ local wget_options="--tries 20 --waitretry 2 --connect-timeout 15 "
+ local failed=false
+ if [ -z "$out_path" ]; then
+ wget -q $wget_options -O - "$remote_path" || failed=true
+ else
+ wget $wget_options -O "$out_path" "$remote_path" || failed=true
+ fi
+ if [ "$failed" = true ]; then
+ say_verbose "Wget download failed"
+ return 1
+ fi
+ return 0
+}
+
+calculate_vars() {
+ eval $invocation
+ valid_legacy_download_link=true
+
+ normalized_architecture="$(get_normalized_architecture_from_architecture "$architecture")"
+ say_verbose "normalized_architecture=$normalized_architecture"
+
+ specific_version="$(get_specific_version_from_version "$azure_feed" "$channel" "$normalized_architecture" "$version" "$json_file")"
+ specific_product_version="$(get_specific_product_version "$azure_feed" "$specific_version")"
+ say_verbose "specific_version=$specific_version"
+ if [ -z "$specific_version" ]; then
+ say_err "Could not resolve version information."
+ return 1
+ fi
+
+ download_link="$(construct_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")"
+ say_verbose "Constructed primary named payload URL: $download_link"
+
+ legacy_download_link="$(construct_legacy_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")" || valid_legacy_download_link=false
+
+ if [ "$valid_legacy_download_link" = true ]; then
+ say_verbose "Constructed legacy named payload URL: $legacy_download_link"
+ else
+ say_verbose "Cound not construct a legacy_download_link; omitting..."
+ fi
+
+ install_root="$(resolve_installation_path "$install_dir")"
+ say_verbose "InstallRoot: $install_root"
+}
+
+install_dotnet() {
+ eval $invocation
+ local download_failed=false
+ local asset_name=''
+ local asset_relative_path=''
+
+ if [[ "$runtime" == "dotnet" ]]; then
+ asset_relative_path="shared/Microsoft.NETCore.App"
+ asset_name=".NET Core Runtime"
+ elif [[ "$runtime" == "aspnetcore" ]]; then
+ asset_relative_path="shared/Microsoft.AspNetCore.App"
+ asset_name="ASP.NET Core Runtime"
+ elif [ -z "$runtime" ]; then
+ asset_relative_path="sdk"
+ asset_name=".NET Core SDK"
+ else
+ say_err "Invalid value for \$runtime"
+ return 1
+ fi
+
+ # Check if the SDK version is already installed.
+ if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_version"; then
+ say "$asset_name version $specific_version is already installed."
+ return 0
+ fi
+
+ mkdir -p "$install_root"
+ zip_path="$(mktemp "$temporary_file_template")"
+ say_verbose "Zip path: $zip_path"
+
+ say "Downloading link: $download_link"
+
+ # A failed primary download is expected when the payload is only published under the legacy link; we fall back to it below.
+ # Do not output to stderr, since output to stderr is considered an error.
+ download "$download_link" "$zip_path" 2>&1 || download_failed=true
+
+ # if the download fails, download the legacy_download_link
+ if [ "$download_failed" = true ]; then
+ say "Cannot download: $download_link"
+
+ if [ "$valid_legacy_download_link" = true ]; then
+ download_failed=false
+ download_link="$legacy_download_link"
+ zip_path="$(mktemp "$temporary_file_template")"
+ say_verbose "Legacy zip path: $zip_path"
+ say "Downloading legacy link: $download_link"
+ download "$download_link" "$zip_path" 2>&1 || download_failed=true
+
+ if [ "$download_failed" = true ]; then
+ say "Cannot download: $download_link"
+ fi
+ fi
+ fi
+
+ if [ "$download_failed" = true ]; then
+ say_err "Could not find/download: \`$asset_name\` with version = $specific_version"
+ say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
+ return 1
+ fi
+
+ say "Extracting zip from $download_link"
+ extract_dotnet_package "$zip_path" "$install_root"
+
+ # Check if the SDK version is installed; if not, fail the installation.
+ # if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed.
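+ # e.g. a specific_version such as "2.1.300-rtm-008866" is verified as release_version "2.1.300".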
+ if [[ $specific_version == *"rtm"* || $specific_version == *"servicing"* ]]; then
+ IFS='-'
+ read -ra verArr <<< "$specific_version"
+ release_version="${verArr[0]}"
+ unset IFS;
+ say_verbose "Checking installation: version = $release_version"
+ if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$release_version"; then
+ return 0
+ fi
+ fi
+
+ # Check if the standard SDK version is installed.
+ say_verbose "Checking installation: version = $specific_product_version"
+ if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_product_version"; then
+ return 0
+ fi
+
+ say_err "\`$asset_name\` with version = $specific_product_version failed to install with an unknown error."
+ return 1
+}
+
+args=("$@")
+
+local_version_file_relative_path="/.version"
+bin_folder_relative_path=""
+temporary_file_template="${TMPDIR:-/tmp}/dotnet.XXXXXXXXX"
+
+channel="LTS"
+version="Latest"
+json_file=""
+install_dir=""
+architecture=""
+dry_run=false
+no_path=false
+no_cdn=false
+azure_feed="https://dotnetcli.azureedge.net/dotnet"
+uncached_feed="https://dotnetcli.blob.core.windows.net/dotnet"
+feed_credential=""
+verbose=false
+runtime=""
+runtime_id=""
+override_non_versioned_files=true
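+# Pass-through flags are collected here and echoed verbatim in the dry-run "repeatable invocation" below.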
+non_dynamic_parameters=""
+
+while [ $# -ne 0 ]
+do
+ name="$1"
+ case "$name" in
+ -c|--channel|-[Cc]hannel)
+ shift
+ channel="$1"
+ ;;
+ -v|--version|-[Vv]ersion)
+ shift
+ version="$1"
+ ;;
+ -i|--install-dir|-[Ii]nstall[Dd]ir)
+ shift
+ install_dir="$1"
+ ;;
+ --arch|--architecture|-[Aa]rch|-[Aa]rchitecture)
+ shift
+ architecture="$1"
+ ;;
+ --shared-runtime|-[Ss]hared[Rr]untime)
+ say_warning "The --shared-runtime flag is obsolete and may be removed in a future version of this script. The recommended usage is to specify '--runtime dotnet'."
+ if [ -z "$runtime" ]; then
+ runtime="dotnet"
+ fi
+ ;;
+ --runtime|-[Rr]untime)
+ shift
+ runtime="$1"
+ if [[ "$runtime" != "dotnet" ]] && [[ "$runtime" != "aspnetcore" ]]; then
+ say_err "Unsupported value for --runtime: '$1'. Valid values are 'dotnet' and 'aspnetcore'."
+ if [[ "$runtime" == "windowsdesktop" ]]; then
+ say_err "WindowsDesktop archives are manufactured for Windows platforms only."
+ fi
+ exit 1
+ fi
+ ;;
+ --dry-run|-[Dd]ry[Rr]un)
+ dry_run=true
+ ;;
+ --no-path|-[Nn]o[Pp]ath)
+ no_path=true
+ non_dynamic_parameters+=" $name"
+ ;;
+ --verbose|-[Vv]erbose)
+ verbose=true
+ non_dynamic_parameters+=" $name"
+ ;;
+ --no-cdn|-[Nn]o[Cc]dn)
+ no_cdn=true
+ non_dynamic_parameters+=" $name"
+ ;;
+ --azure-feed|-[Aa]zure[Ff]eed)
+ shift
+ azure_feed="$1"
+ non_dynamic_parameters+=" $name "\""$1"\"""
+ ;;
+ --uncached-feed|-[Uu]ncached[Ff]eed)
+ shift
+ uncached_feed="$1"
+ non_dynamic_parameters+=" $name "\""$1"\"""
+ ;;
+ --feed-credential|-[Ff]eed[Cc]redential)
+ shift
+ feed_credential="$1"
+ non_dynamic_parameters+=" $name "\""$1"\"""
+ ;;
+ --runtime-id|-[Rr]untime[Ii]d)
+ shift
+ runtime_id="$1"
+ non_dynamic_parameters+=" $name "\""$1"\"""
+ ;;
+ --jsonfile|-[Jj][Ss]on[Ff]ile)
+ shift
+ json_file="$1"
+ ;;
+ --skip-non-versioned-files|-[Ss]kip[Nn]on[Vv]ersioned[Ff]iles)
+ override_non_versioned_files=false
+ non_dynamic_parameters+=" $name"
+ ;;
+ -?|--?|-h|--help|-[Hh]elp)
+ script_name="$(basename "$0")"
+ echo ".NET Tools Installer"
+ echo "Usage: $script_name [-c|--channel <CHANNEL>] [-v|--version <VERSION>] [-p|--prefix <DESTINATION>]"
+ echo " $script_name -h|-?|--help"
+ echo ""
+ echo "$script_name is a simple command line interface for obtaining dotnet cli."
+ echo ""
+ echo "Options:"
+ echo " -c,--channel Download from the channel specified. Defaults to \`$channel\`."
+ echo " -Channel"
+ echo " Possible values:"
+ echo " - Current - most current release"
+ echo " - LTS - most current supported release"
+ echo " - 2-part version in a format A.B - represents a specific release"
+ echo " examples: 2.0; 1.0"
+ echo " - Branch name"
+ echo " examples: release/2.0.0; Master"
+ echo " Note: The version parameter overrides the channel parameter."
+ echo " -v,--version Use specific VERSION. Defaults to \`$version\`."
+ echo " -Version"
+ echo " Possible values:"
+ echo " - latest - the most recent build on the specific channel"
+ echo " - coherent - the most recent coherent build on the specific channel"
+ echo " coherent applies only to SDK downloads"
+ echo " - 3-part version in a format A.B.C - represents specific version of build"
+ echo " examples: 2.0.0-preview2-006120; 1.1.0"
+ echo " -i,--install-dir Install under specified location (see Install Location below)"
+ echo " -InstallDir"
+ echo " --architecture Architecture of dotnet binaries to be installed. Defaults to \`$architecture\`."
+ echo " --arch,-Architecture,-Arch"
+ echo " Possible values: x64, arm, and arm64"
+ echo " --runtime Installs a shared runtime only, without the SDK."
+ echo " -Runtime"
+ echo " Possible values:"
+ echo " - dotnet - the Microsoft.NETCore.App shared runtime"
+ echo " - aspnetcore - the Microsoft.AspNetCore.App shared runtime"
+ echo " --dry-run,-DryRun Do not perform installation. Display download link."
+ echo " --no-path, -NoPath Do not set PATH for the current process."
+ echo " --verbose,-Verbose Display diagnostics information."
+ echo " --azure-feed,-AzureFeed Azure feed location. Defaults to $azure_feed. This parameter is not typically changed by the user."
+ echo " --uncached-feed,-UncachedFeed Uncached feed location. This parameter typically is not changed by the user."
+ echo " --feed-credential,-FeedCredential Azure feed shared access token. This parameter typically is not specified."
+ echo " --skip-non-versioned-files Skips non-versioned files if they already exist, such as the dotnet executable."
+ echo " -SkipNonVersionedFiles"
+ echo " --no-cdn,-NoCdn Disable downloading from the Azure CDN, and use the uncached feed directly."
+ echo " --jsonfile Determines the SDK version from a user specified global.json file."
+ echo " Note: global.json must have a value for 'SDK:Version'"
+ echo " --runtime-id Installs the .NET Tools for the given platform (use linux-x64 for portable linux)."
+ echo " -RuntimeId"
+ echo " -?,--?,-h,--help,-Help Shows this help message"
+ echo ""
+ echo "Obsolete parameters:"
+ echo " --shared-runtime The recommended alternative is '--runtime dotnet'."
+ echo " This parameter is obsolete and may be removed in a future version of this script."
+ echo " Installs just the shared runtime bits, not the entire SDK."
+ echo ""
+ echo "Install Location:"
+ echo " Location is chosen in following order:"
+ echo " - --install-dir option"
+ echo " - Environmental variable DOTNET_INSTALL_DIR"
+ echo " - $HOME/.dotnet"
+ exit 0
+ ;;
+ *)
+ say_err "Unknown argument \`$name\`"
+ exit 1
+ ;;
+ esac
+
+ shift
+done
+
+if [ "$no_cdn" = true ]; then
+ azure_feed="$uncached_feed"
+fi
+
+check_min_reqs
+calculate_vars
+script_name=$(basename "$0")
+
+if [ "$dry_run" = true ]; then
+ say "Payload URLs:"
+ say "Primary named payload URL: $download_link"
+ if [ "$valid_legacy_download_link" = true ]; then
+ say "Legacy named payload URL: $legacy_download_link"
+ fi
+ repeatable_command="./$script_name --version "\""$specific_version"\"" --install-dir "\""$install_root"\"" --architecture "\""$normalized_architecture"\"""
+ if [[ "$runtime" == "dotnet" ]]; then
+ repeatable_command+=" --runtime "\""dotnet"\"""
+ elif [[ "$runtime" == "aspnetcore" ]]; then
+ repeatable_command+=" --runtime "\""aspnetcore"\"""
+ fi
+ repeatable_command+="$non_dynamic_parameters"
+ say "Repeatable invocation: $repeatable_command"
+ exit 0
+fi
+
+check_pre_reqs
+install_dotnet
+
+bin_path="$(get_absolute_path "$(combine_paths "$install_root" "$bin_folder_relative_path")")"
+if [ "$no_path" = false ]; then
+ say "Adding to current process PATH: \`$bin_path\`. Note: This change will be visible only when sourcing script."
+ export PATH="$bin_path":"$PATH"
+else
+ say "Binaries of dotnet can be found in $bin_path"
+fi
+
+say "Installation finished successfully."
diff --git a/eng/common/dotnet-install.ps1 b/eng/common/dotnet-install.ps1
index ec3e739fe83..811f0f717f7 100644
--- a/eng/common/dotnet-install.ps1
+++ b/eng/common/dotnet-install.ps1
@@ -1,28 +1,27 @@
[CmdletBinding(PositionalBinding=$false)]
Param(
- [string] $verbosity = "minimal",
- [string] $architecture = "",
- [string] $version = "Latest",
- [string] $runtime = "dotnet",
- [string] $RuntimeSourceFeed = "",
- [string] $RuntimeSourceFeedKey = ""
+ [string] $verbosity = 'minimal',
+ [string] $architecture = '',
+ [string] $version = 'Latest',
+ [string] $runtime = 'dotnet',
+ [string] $RuntimeSourceFeed = '',
+ [string] $RuntimeSourceFeedKey = ''
)
. $PSScriptRoot\tools.ps1
-$dotnetRoot = Join-Path $RepoRoot ".dotnet"
+$dotnetRoot = Join-Path $RepoRoot '.dotnet'
$installdir = $dotnetRoot
try {
- if ($architecture -and $architecture.Trim() -eq "x86") {
- $installdir = Join-Path $installdir "x86"
+ if ($architecture -and $architecture.Trim() -eq 'x86') {
+ $installdir = Join-Path $installdir 'x86'
}
- InstallDotNet $installdir $version $architecture $runtime $true -RuntimeSourceFeed $RuntimeSourceFeed -RuntimeSourceFeedKey $RuntimeSourceFeedKey
-}
+ InstallDotNet $installdir $version $architecture $runtime $true -RuntimeSourceFeed $RuntimeSourceFeed -RuntimeSourceFeedKey $RuntimeSourceFeedKey
+}
catch {
- Write-Host $_
- Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
ExitWithExitCode 1
}
diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh
index d259a274c78..ead6a1d9a24 100755
--- a/eng/common/dotnet-install.sh
+++ b/eng/common/dotnet-install.sh
@@ -11,6 +11,8 @@ while [[ -h "$source" ]]; do
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+. "$scriptroot/tools.sh"
+
version='Latest'
architecture=''
runtime='dotnet'
@@ -40,18 +42,47 @@ while [[ $# > 0 ]]; do
runtimeSourceFeedKey="$1"
;;
*)
- echo "Invalid argument: $1"
+ Write-PipelineTelemetryError -Category 'Build' -Message "Invalid argument: $1"
exit 1
;;
esac
shift
done
-. "$scriptroot/tools.sh"
+# Use uname to determine what the CPU is.
+cpuname=$(uname -p)
+# Some Linux platforms report unknown for platform, but the arch for machine.
+if [[ "$cpuname" == "unknown" ]]; then
+ cpuname=$(uname -m)
+fi
+
+case $cpuname in
+ aarch64)
+ buildarch=arm64
+ ;;
+ amd64|x86_64)
+ buildarch=x64
+ ;;
+ armv*l)
+ buildarch=arm
+ ;;
+ i686)
+ buildarch=x86
+ ;;
+ *)
+ echo "Unknown CPU $cpuname detected, treating it as x64"
+ buildarch=x64
+ ;;
+esac
+
dotnetRoot="$repo_root/.dotnet"
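+# When installing an architecture other than the machine's, nest it under an architecture-specific subdirectory.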
+if [[ $architecture != "" ]] && [[ $architecture != $buildarch ]]; then
+ dotnetRoot="$dotnetRoot/$architecture"
+fi
+
InstallDotNet $dotnetRoot $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || {
local exit_code=$?
- echo "dotnet-install.sh failed (exit code '$exit_code')." >&2
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "dotnet-install.sh failed (exit code '$exit_code')." >&2
ExitWithExitCode $exit_code
}
diff --git a/eng/common/enable-cross-org-publishing.ps1 b/eng/common/enable-cross-org-publishing.ps1
index eccbf9f1b16..da09da4f1fc 100644
--- a/eng/common/enable-cross-org-publishing.ps1
+++ b/eng/common/enable-cross-org-publishing.ps1
@@ -2,5 +2,12 @@ param(
[string] $token
)
-Write-Host "##vso[task.setvariable variable=VSS_NUGET_ACCESSTOKEN]$token"
-Write-Host "##vso[task.setvariable variable=VSS_NUGET_URI_PREFIXES]https://dnceng.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/dnceng/;https://devdiv.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/devdiv/"
+
+. $PSScriptRoot\pipeline-logging-functions.ps1
+
+# Write-PipelineSetVariable will no-op if a variable named $ci is not defined
+# Since this script is only ever called in AzDO builds, just universally set it
+$ci = $true
+
+Write-PipelineSetVariable -Name 'VSS_NUGET_ACCESSTOKEN' -Value $token -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'VSS_NUGET_URI_PREFIXES' -Value 'https://dnceng.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/dnceng/;https://devdiv.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/devdiv/' -IsMultiJobVariable $false
diff --git a/eng/common/generate-graph-files.ps1 b/eng/common/generate-graph-files.ps1
index b056e4c1ac2..0728b1a8b57 100644
--- a/eng/common/generate-graph-files.ps1
+++ b/eng/common/generate-graph-files.ps1
@@ -3,39 +3,39 @@ Param(
[Parameter(Mandatory=$true)][string] $gitHubPat, # GitHub personal access token from https://github.com/settings/tokens (no auth scopes needed)
[Parameter(Mandatory=$true)][string] $azdoPat, # Azure Dev Ops tokens from https://dev.azure.com/dnceng/_details/security/tokens (code read scope needed)
[Parameter(Mandatory=$true)][string] $outputFolder, # Where the graphviz.txt file will be created
- [string] $darcVersion = '1.1.0-beta.19175.6', # darc's version
+ [string] $darcVersion, # darc's version
[string] $graphvizVersion = '2.38', # GraphViz version
[switch] $includeToolset # Whether the graph should include toolset dependencies or not. i.e. arcade, optimization. For more about
# toolset dependencies see https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#toolset-vs-product-dependencies
)
-$ErrorActionPreference = "Stop"
-. $PSScriptRoot\tools.ps1
-
-Import-Module -Name (Join-Path $PSScriptRoot "native\CommonLibrary.psm1")
-
function CheckExitCode ([string]$stage)
{
$exitCode = $LASTEXITCODE
if ($exitCode -ne 0) {
- Write-Host "Something failed in stage: '$stage'. Check for errors above. Exiting now..."
+ Write-PipelineTelemetryError -Category 'Arcade' -Message "Something failed in stage: '$stage'. Check for errors above. Exiting now..."
ExitWithExitCode $exitCode
}
}
try {
+ $ErrorActionPreference = 'Stop'
+ . $PSScriptRoot\tools.ps1
+
+ Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1')
+
Push-Location $PSScriptRoot
- Write-Host "Installing darc..."
+ Write-Host 'Installing darc...'
. .\darc-init.ps1 -darcVersion $darcVersion
- CheckExitCode "Running darc-init"
+ CheckExitCode 'Running darc-init'
- $engCommonBaseDir = Join-Path $PSScriptRoot "native\"
+ $engCommonBaseDir = Join-Path $PSScriptRoot 'native\'
$graphvizInstallDir = CommonLibrary\Get-NativeInstallDirectory
- $nativeToolBaseUri = "https://netcorenativeassets.blob.core.windows.net/resource-packages/external"
- $installBin = Join-Path $graphvizInstallDir "bin"
+ $nativeToolBaseUri = 'https://netcorenativeassets.blob.core.windows.net/resource-packages/external'
+ $installBin = Join-Path $graphvizInstallDir 'bin'
- Write-Host "Installing dot..."
+ Write-Host 'Installing dot...'
.\native\install-tool.ps1 -ToolName graphviz -InstallPath $installBin -BaseUri $nativeToolBaseUri -CommonLibraryDirectory $engCommonBaseDir -Version $graphvizVersion -Verbose
$darcExe = "$env:USERPROFILE\.dotnet\tools"
@@ -51,37 +51,36 @@ try {
$graphVizImageFilePath = "$outputFolder\graph.png"
$normalGraphFilePath = "$outputFolder\graph-full.txt"
$flatGraphFilePath = "$outputFolder\graph-flat.txt"
- $baseOptions = @( "--github-pat", "$gitHubPat", "--azdev-pat", "$azdoPat", "--password", "$barToken" )
+ $baseOptions = @( '--github-pat', "$gitHubPat", '--azdev-pat', "$azdoPat", '--password', "$barToken" )
if ($includeToolset) {
- Write-Host "Toolsets will be included in the graph..."
- $baseOptions += @( "--include-toolset" )
+ Write-Host 'Toolsets will be included in the graph...'
+ $baseOptions += @( '--include-toolset' )
}
- Write-Host "Generating standard dependency graph..."
+ Write-Host 'Generating standard dependency graph...'
& "$darcExe" get-dependency-graph @baseOptions --output-file $normalGraphFilePath
- CheckExitCode "Generating normal dependency graph"
+ CheckExitCode 'Generating normal dependency graph'
- Write-Host "Generating flat dependency graph and graphviz file..."
+ Write-Host 'Generating flat dependency graph and graphviz file...'
& "$darcExe" get-dependency-graph @baseOptions --flat --coherency --graphviz $graphVizFilePath --output-file $flatGraphFilePath
- CheckExitCode "Generating flat and graphviz dependency graph"
+ CheckExitCode 'Generating flat and graphviz dependency graph'
Write-Host "Generating graph image $graphVizFilePath"
$dotFilePath = Join-Path $installBin "graphviz\$graphvizVersion\release\bin\dot.exe"
& "$dotFilePath" -Tpng -o"$graphVizImageFilePath" "$graphVizFilePath"
- CheckExitCode "Generating graphviz image"
+ CheckExitCode 'Generating graphviz image'
Write-Host "'$graphVizFilePath', '$flatGraphFilePath', '$normalGraphFilePath' and '$graphVizImageFilePath' created!"
}
catch {
if (!$includeToolset) {
- Write-Host "This might be a toolset repo which includes only toolset dependencies. " -NoNewline -ForegroundColor Yellow
- Write-Host "Since -includeToolset is not set there is no graph to create. Include -includeToolset and try again..." -ForegroundColor Yellow
+ Write-Host 'This might be a toolset repo which includes only toolset dependencies. ' -NoNewline -ForegroundColor Yellow
+ Write-Host 'Since -includeToolset is not set there is no graph to create. Include -includeToolset and try again...' -ForegroundColor Yellow
}
- Write-Host $_
- Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Arcade' -Message $_
ExitWithExitCode 1
} finally {
- Pop-Location
+ Pop-Location
}
\ No newline at end of file
diff --git a/eng/common/init-tools-native.ps1 b/eng/common/init-tools-native.ps1
index 8cf18bcfeba..db830c00a6f 100644
--- a/eng/common/init-tools-native.ps1
+++ b/eng/common/init-tools-native.ps1
@@ -35,7 +35,7 @@ File path to global.json file
#>
[CmdletBinding(PositionalBinding=$false)]
Param (
- [string] $BaseUri = "https://netcorenativeassets.blob.core.windows.net/resource-packages/external",
+ [string] $BaseUri = 'https://netcorenativeassets.blob.core.windows.net/resource-packages/external',
[string] $InstallDirectory,
[switch] $Clean = $False,
[switch] $Force = $False,
@@ -45,26 +45,27 @@ Param (
)
if (!$GlobalJsonFile) {
- $GlobalJsonFile = Join-Path (Get-Item $PSScriptRoot).Parent.Parent.FullName "global.json"
+ $GlobalJsonFile = Join-Path (Get-Item $PSScriptRoot).Parent.Parent.FullName 'global.json'
}
Set-StrictMode -version 2.0
-$ErrorActionPreference="Stop"
+$ErrorActionPreference='Stop'
-Import-Module -Name (Join-Path $PSScriptRoot "native\CommonLibrary.psm1")
+. $PSScriptRoot\pipeline-logging-functions.ps1
+Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1')
try {
# Define verbose switch if undefined
- $Verbose = $VerbosePreference -Eq "Continue"
+ $Verbose = $VerbosePreference -Eq 'Continue'
- $EngCommonBaseDir = Join-Path $PSScriptRoot "native\"
+ $EngCommonBaseDir = Join-Path $PSScriptRoot 'native\'
$NativeBaseDir = $InstallDirectory
if (!$NativeBaseDir) {
$NativeBaseDir = CommonLibrary\Get-NativeInstallDirectory
}
$Env:CommonLibrary_NativeInstallDir = $NativeBaseDir
- $InstallBin = Join-Path $NativeBaseDir "bin"
- $InstallerPath = Join-Path $EngCommonBaseDir "install-tool.ps1"
+ $InstallBin = Join-Path $NativeBaseDir 'bin'
+ $InstallerPath = Join-Path $EngCommonBaseDir 'install-tool.ps1'
# Process tools list
Write-Host "Processing $GlobalJsonFile"
@@ -74,7 +75,7 @@ try {
}
$NativeTools = Get-Content($GlobalJsonFile) -Raw |
ConvertFrom-Json |
- Select-Object -Expand "native-tools" -ErrorAction SilentlyContinue
+ Select-Object -Expand 'native-tools' -ErrorAction SilentlyContinue
if ($NativeTools) {
$NativeTools.PSObject.Properties | ForEach-Object {
$ToolName = $_.Name
@@ -112,18 +113,21 @@ try {
}
$toolInstallationFailure = $true
} else {
- Write-Error $errMsg
+ # We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
+ Write-Host $errMsg
exit 1
}
}
}
if ((Get-Variable 'toolInstallationFailure' -ErrorAction 'SilentlyContinue') -and $toolInstallationFailure) {
+ # We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
+ Write-Host 'Native tools bootstrap failed'
exit 1
}
}
else {
- Write-Host "No native tools defined in global.json"
+ Write-Host 'No native tools defined in global.json'
exit 0
}
@@ -131,17 +135,18 @@ try {
exit 0
}
if (Test-Path $InstallBin) {
- Write-Host "Native tools are available from" (Convert-Path -Path $InstallBin)
+ Write-Host 'Native tools are available from ' (Convert-Path -Path $InstallBin)
Write-Host "##vso[task.prependpath]$(Convert-Path -Path $InstallBin)"
+ return $InstallBin
}
else {
- Write-Error "Native tools install directory does not exist, installation failed"
+ Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message 'Native tools install directory does not exist, installation failed'
exit 1
}
exit 0
}
catch {
- Write-Host $_
- Write-Host $_.Exception
- exit 1
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message $_
+ ExitWithExitCode 1
}
diff --git a/eng/common/init-tools-native.sh b/eng/common/init-tools-native.sh
index 4dafaaca130..29fc5db8ae0 100755
--- a/eng/common/init-tools-native.sh
+++ b/eng/common/init-tools-native.sh
@@ -12,6 +12,7 @@ retry_wait_time_seconds=30
global_json_file="$(dirname "$(dirname "${scriptroot}")")/global.json"
declare -A native_assets
+. $scriptroot/pipeline-logging-functions.sh
. $scriptroot/native/common-library.sh
while (($# > 0)); do
@@ -33,6 +34,14 @@ while (($# > 0)); do
force=true
shift 1
;;
+ --donotabortonfailure)
+ donotabortonfailure=true
+ shift 1
+ ;;
+ --donotdisplaywarnings)
+ donotdisplaywarnings=true
+ shift 1
+ ;;
--downloadretries)
download_retries=$2
shift 2
@@ -51,6 +60,8 @@ while (($# > 0)); do
echo " - (default) %USERPROFILE%/.netcoreeng/native"
echo ""
echo " --clean Switch specifying not to install anything, but cleanup native asset folders"
+ echo " --donotabortonfailure Switch specifying whether to abort native tools installation on failure"
+ echo " --donotdisplaywarnings Switch specifying whether to display warnings during native tools installation on failure"
echo " --force Clean and then install tools"
echo " --help Print help and exit"
echo ""
@@ -91,6 +102,7 @@ if [[ -z $install_directory ]]; then
fi
install_bin="${native_base_dir}/bin"
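+# Track whether any tool was actually installed so the missing-directory check below only fails when something should have been installed.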
+installed_any=false
ReadGlobalJsonNativeTools
@@ -102,8 +114,8 @@ else
for tool in "${!native_assets[@]}"
do
tool_version=${native_assets[$tool]}
- installer_name="install-$tool.sh"
- installer_command="$native_installer_dir/$installer_name"
+ installer_path="$native_installer_dir/install-$tool.sh"
+ installer_command="$installer_path"
installer_command+=" --baseuri $base_uri"
installer_command+=" --installpath $install_bin"
installer_command+=" --version $tool_version"
@@ -117,11 +129,29 @@ else
installer_command+=" --clean"
fi
- $installer_command
-
- if [[ $? != 0 ]]; then
- echo "Execution Failed" >&2
- exit 1
+ if [[ -a $installer_path ]]; then
+ $installer_command
+ if [[ $? != 0 ]]; then
+ if [[ $donotabortonfailure = true ]]; then
+ if [[ $donotdisplaywarnings != true ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed"
+ fi
+ else
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed"
+ exit 1
+ fi
+ else
+ installed_any=true
+ fi
+ else
+ if [[ $donotabortonfailure == true ]]; then
+ if [[ $donotdisplaywarnings != true ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script"
+ fi
+ else
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script"
+ exit 1
+ fi
fi
done
fi
@@ -134,8 +164,10 @@ if [[ -d $install_bin ]]; then
echo "Native tools are available from $install_bin"
echo "##vso[task.prependpath]$install_bin"
else
- echo "Native tools install directory does not exist, installation failed" >&2
- exit 1
+ if [[ $installed_any = true ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Native tools install directory does not exist, installation failed"
+ exit 1
+ fi
fi
exit 0
diff --git a/eng/common/internal-feed-operations.ps1 b/eng/common/internal-feed-operations.ps1
index 8b8bafd6a89..b8f6529fdc8 100644
--- a/eng/common/internal-feed-operations.ps1
+++ b/eng/common/internal-feed-operations.ps1
@@ -6,9 +6,8 @@ param(
[switch] $IsFeedPrivate
)
-$ErrorActionPreference = "Stop"
+$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
-
. $PSScriptRoot\tools.ps1
# Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed
@@ -21,7 +20,7 @@ function SetupCredProvider {
)
# Install the Cred Provider NuGet plugin
- Write-Host "Setting up Cred Provider NuGet plugin in the agent..."
+ Write-Host 'Setting up Cred Provider NuGet plugin in the agent...'
Write-Host "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..."
$url = 'https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.ps1'
@@ -29,18 +28,18 @@ function SetupCredProvider {
Write-Host "Writing the contents of 'installcredprovider.ps1' locally..."
Invoke-WebRequest $url -OutFile installcredprovider.ps1
- Write-Host "Installing plugin..."
+ Write-Host 'Installing plugin...'
.\installcredprovider.ps1 -Force
Write-Host "Deleting local copy of 'installcredprovider.ps1'..."
Remove-Item .\installcredprovider.ps1
if (-Not("$env:USERPROFILE\.nuget\plugins\netcore")) {
- Write-Host "CredProvider plugin was not installed correctly!"
+ Write-PipelineTelemetryError -Category 'Arcade' -Message 'CredProvider plugin was not installed correctly!'
ExitWithExitCode 1
}
else {
- Write-Host "CredProvider plugin was installed correctly!"
+ Write-Host 'CredProvider plugin was installed correctly!'
}
# Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
@@ -49,7 +48,7 @@ function SetupCredProvider {
$nugetConfigPath = "$RepoRoot\NuGet.config"
if (-Not (Test-Path -Path $nugetConfigPath)) {
- Write-Host "NuGet.config file not found in repo's root!"
+ Write-PipelineTelemetryError -Category 'Build' -Message 'NuGet.config file not found in repo root!'
ExitWithExitCode 1
}
@@ -64,6 +63,7 @@ function SetupCredProvider {
}
if (($endpoints | Measure-Object).Count -gt 0) {
+ # [SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Endpoint code example with no real credentials.")]
# Create the JSON object. It should look like '{"endpointCredentials": [{"endpoint":"http://example.index.json", "username":"optional", "password":"accesstoken"}]}'
$endpointCredentials = @{endpointCredentials=$endpoints} | ConvertTo-Json -Compress
@@ -81,7 +81,7 @@ function SetupCredProvider {
}
else
{
- Write-Host "No internal endpoints found in NuGet.config"
+ Write-Host 'No internal endpoints found in NuGet.config'
}
}
@@ -99,7 +99,7 @@ function InstallDotNetSdkAndRestoreArcade {
& $dotnet restore $restoreProjPath
- Write-Host "Arcade SDK restored!"
+ Write-Host 'Arcade SDK restored!'
if (Test-Path -Path $restoreProjPath) {
Remove-Item $restoreProjPath
@@ -113,23 +113,22 @@ function InstallDotNetSdkAndRestoreArcade {
try {
Push-Location $PSScriptRoot
- if ($Operation -like "setup") {
+ if ($Operation -like 'setup') {
SetupCredProvider $AuthToken
}
- elseif ($Operation -like "install-restore") {
+ elseif ($Operation -like 'install-restore') {
InstallDotNetSdkAndRestoreArcade
}
else {
- Write-Host "Unknown operation '$Operation'!"
+ Write-PipelineTelemetryError -Category 'Arcade' -Message "Unknown operation '$Operation'!"
ExitWithExitCode 1
}
}
catch {
- Write-Host $_
- Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Arcade' -Message $_
ExitWithExitCode 1
}
finally {
- Pop-Location
+ Pop-Location
}
diff --git a/eng/common/internal-feed-operations.sh b/eng/common/internal-feed-operations.sh
index 1ff654d2ffc..9ed225e7e55 100755
--- a/eng/common/internal-feed-operations.sh
+++ b/eng/common/internal-feed-operations.sh
@@ -30,7 +30,7 @@ function SetupCredProvider {
rm installcredprovider.sh
if [ ! -d "$HOME/.nuget/plugins" ]; then
- echo "CredProvider plugin was not installed correctly!"
+ Write-PipelineTelemetryError -category 'Build' 'CredProvider plugin was not installed correctly!'
ExitWithExitCode 1
else
echo "CredProvider plugin was installed correctly!"
@@ -42,7 +42,7 @@ function SetupCredProvider {
local nugetConfigPath="$repo_root/NuGet.config"
if [ ! "$nugetConfigPath" ]; then
- echo "NuGet.config file not found in repo's root!"
+ Write-PipelineTelemetryError -category 'Build' "NuGet.config file not found in repo's root!"
ExitWithExitCode 1
fi
@@ -62,6 +62,7 @@ function SetupCredProvider {
endpoints+=']'
if [ ${#endpoints} -gt 2 ]; then
+ # [SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Endpoint code example with no real credentials.")]
# Create the JSON object. It should look like '{"endpointCredentials": [{"endpoint":"http://example.index.json", "username":"optional", "password":"accesstoken"}]}'
local endpointCredentials="{\"endpointCredentials\": "$endpoints"}"
diff --git a/eng/common/internal/Directory.Build.props b/eng/common/internal/Directory.Build.props
index e33179ef373..dbf99d82a5c 100644
--- a/eng/common/internal/Directory.Build.props
+++ b/eng/common/internal/Directory.Build.props
@@ -1,4 +1,4 @@
-
+
diff --git a/eng/common/internal/Tools.csproj b/eng/common/internal/Tools.csproj
index 1a39a7ef3f6..f46d5efe2e3 100644
--- a/eng/common/internal/Tools.csproj
+++ b/eng/common/internal/Tools.csproj
@@ -4,6 +4,7 @@
net472
false
+ false
diff --git a/eng/common/msbuild.ps1 b/eng/common/msbuild.ps1
index b37fd3d5e97..c6401230002 100644
--- a/eng/common/msbuild.ps1
+++ b/eng/common/msbuild.ps1
@@ -1,6 +1,6 @@
[CmdletBinding(PositionalBinding=$false)]
Param(
- [string] $verbosity = "minimal",
+ [string] $verbosity = 'minimal',
[bool] $warnAsError = $true,
[bool] $nodeReuse = $true,
[switch] $ci,
@@ -18,9 +18,8 @@ try {
MSBuild @extraArgs
}
catch {
- Write-Host $_
- Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Build' -Message $_
ExitWithExitCode 1
}
diff --git a/eng/common/native/CommonLibrary.psm1 b/eng/common/native/CommonLibrary.psm1
index 41416862d91..d7d1a651094 100644
--- a/eng/common/native/CommonLibrary.psm1
+++ b/eng/common/native/CommonLibrary.psm1
@@ -145,9 +145,12 @@ function Get-File {
New-Item -path $DownloadDirectory -force -itemType "Directory" | Out-Null
}
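+ # Download (or copy) to a temporary file first so a partial transfer never leaves a broken file at $Path.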
+ $TempPath = "$Path.tmp"
if (Test-Path -IsValid -Path $Uri) {
- Write-Verbose "'$Uri' is a file path, copying file to '$Path'"
- Copy-Item -Path $Uri -Destination $Path
+ Write-Verbose "'$Uri' is a file path, copying temporarily to '$TempPath'"
+ Copy-Item -Path $Uri -Destination $TempPath
+ Write-Verbose "Moving temporary file to '$Path'"
+ Move-Item -Path $TempPath -Destination $Path
return $?
}
else {
@@ -157,8 +160,10 @@ function Get-File {
while($Attempt -Lt $DownloadRetries)
{
try {
- Invoke-WebRequest -UseBasicParsing -Uri $Uri -OutFile $Path
- Write-Verbose "Downloaded to '$Path'"
+ Invoke-WebRequest -UseBasicParsing -Uri $Uri -OutFile $TempPath
+ Write-Verbose "Downloaded to temporary location '$TempPath'"
+ Move-Item -Path $TempPath -Destination $Path
+ Write-Verbose "Moved temporary file to '$Path'"
return $True
}
catch {
@@ -359,16 +364,21 @@ function Expand-Zip {
return $False
}
}
- if (-Not (Test-Path $OutputDirectory)) {
- New-Item -path $OutputDirectory -Force -itemType "Directory" | Out-Null
+
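+ # Extract into a temporary directory and move it into place so a failed extraction never leaves a partial tree at $OutputDirectory.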
+ $TempOutputDirectory = Join-Path "$(Split-Path -Parent $OutputDirectory)" "$(Split-Path -Leaf $OutputDirectory).tmp"
+ if (Test-Path $TempOutputDirectory) {
+ Remove-Item $TempOutputDirectory -Force -Recurse
}
+ New-Item -Path $TempOutputDirectory -Force -ItemType "Directory" | Out-Null
Add-Type -assembly "system.io.compression.filesystem"
- [io.compression.zipfile]::ExtractToDirectory("$ZipPath", "$OutputDirectory")
+ [io.compression.zipfile]::ExtractToDirectory("$ZipPath", "$TempOutputDirectory")
if ($? -Eq $False) {
Write-Error "Unable to extract '$ZipPath'"
return $False
}
+
+ Move-Item -Path $TempOutputDirectory -Destination $OutputDirectory
}
catch {
Write-Host $_
diff --git a/eng/common/native/common-library.sh b/eng/common/native/common-library.sh
index 271bddfac5a..bf272dcf55a 100755
--- a/eng/common/native/common-library.sh
+++ b/eng/common/native/common-library.sh
@@ -34,7 +34,7 @@ function ExpandZip {
echo "'Force flag enabled, but '$output_directory' exists. Removing directory"
rm -rf $output_directory
if [[ $? != 0 ]]; then
- echo Unable to remove '$output_directory'>&2
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to remove '$output_directory'"
return 1
fi
fi
@@ -45,7 +45,7 @@ function ExpandZip {
echo "Extracting archive"
tar -xf $zip_path -C $output_directory
if [[ $? != 0 ]]; then
- echo "Unable to extract '$zip_path'" >&2
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to extract '$zip_path'"
return 1
fi
@@ -117,7 +117,7 @@ function DownloadAndExtract {
# Download file
GetFile "$uri" "$temp_tool_path" $force $download_retries $retry_wait_time_seconds
if [[ $? != 0 ]]; then
- echo "Failed to download '$uri' to '$temp_tool_path'." >&2
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to download '$uri' to '$temp_tool_path'."
return 1
fi
@@ -125,7 +125,7 @@ function DownloadAndExtract {
echo "extracting from $temp_tool_path to $installDir"
ExpandZip "$temp_tool_path" "$installDir" $force $download_retries $retry_wait_time_seconds
if [[ $? != 0 ]]; then
- echo "Failed to extract '$temp_tool_path' to '$installDir'." >&2
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to extract '$temp_tool_path' to '$installDir'."
return 1
fi
@@ -148,7 +148,7 @@ function NewScriptShim {
fi
if [[ ! -f $tool_file_path ]]; then
- echo "Specified tool file path:'$tool_file_path' does not exist" >&2
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Specified tool file path:'$tool_file_path' does not exist"
return 1
fi
diff --git a/eng/common/native/find-native-compiler.sh b/eng/common/native/find-native-compiler.sh
new file mode 100644
index 00000000000..aed19d07d50
--- /dev/null
+++ b/eng/common/native/find-native-compiler.sh
@@ -0,0 +1,121 @@
+#!/usr/bin/env bash
+#
+# This file locates the native compiler with the given name and version and sets the environment variables to locate it.
+#
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $SOURCE until the file is no longer a symlink
+while [[ -h $source ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+if [ $# -lt 1 ]
+then
+ echo "Usage..."
+ echo "find-native-compiler.sh <compiler> <compiler major version> <compiler minor version>"
+ echo "Specify the name of compiler (clang or gcc)."
+ echo "Specify the major version of compiler."
+ echo "Specify the minor version of compiler."
+ exit 1
+fi
+
+. $scriptroot/../pipeline-logging-functions.sh
+
+compiler="$1"
+cxxCompiler="$compiler++"
+majorVersion="$2"
+minorVersion="$3"
+
+if [ "$compiler" = "gcc" ]; then cxxCompiler="g++"; fi
+
+check_version_exists() {
+ desired_version=-1
+
+ # Set up the environment to be used for building with the desired compiler.
+ if command -v "$compiler-$1.$2" > /dev/null; then
+ desired_version="-$1.$2"
+ elif command -v "$compiler$1$2" > /dev/null; then
+ desired_version="$1$2"
+ elif command -v "$compiler-$1$2" > /dev/null; then
+ desired_version="-$1$2"
+ fi
+
+ echo "$desired_version"
+}
+
+if [ -z "$CLR_CC" ]; then
+
+ # Set default versions
+ if [ -z "$majorVersion" ]; then
+ # note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero.
+ if [ "$compiler" = "clang" ]; then versions=( 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5 )
+ elif [ "$compiler" = "gcc" ]; then versions=( 9 8 7 6 5 4.9 ); fi
+
+ for version in "${versions[@]}"; do
+ parts=(${version//./ })
+ desired_version="$(check_version_exists "${parts[0]}" "${parts[1]}")"
+ if [ "$desired_version" != "-1" ]; then majorVersion="${parts[0]}"; break; fi
+ done
+
+ if [ -z "$majorVersion" ]; then
+ if command -v "$compiler" > /dev/null; then
+ if [ "$(uname)" != "Darwin" ]; then
+ Write-PipelineTelemetryError -category "Build" -type "warning" "Specific version of $compiler not found, falling back to use the one in PATH."
+ fi
+ export CC="$(command -v "$compiler")"
+ export CXX="$(command -v "$cxxCompiler")"
+ else
+ Write-PipelineTelemetryError -category "Build" "No usable version of $compiler found."
+ exit 1
+ fi
+ else
+ if [ "$compiler" = "clang" ] && [ "$majorVersion" -lt 5 ]; then
+ if [ "$build_arch" = "arm" ] || [ "$build_arch" = "armel" ]; then
+ if command -v "$compiler" > /dev/null; then
+ Write-PipelineTelemetryError -category "Build" -type "warning" "Found clang version $majorVersion which is not supported on arm/armel architectures, falling back to use clang from PATH."
+ export CC="$(command -v "$compiler")"
+ export CXX="$(command -v "$cxxCompiler")"
+ else
+ Write-PipelineTelemetryError -category "Build" "Found clang version $majorVersion which is not supported on arm/armel architectures, and there is no clang in PATH."
+ exit 1
+ fi
+ fi
+ fi
+ fi
+ else
+ desired_version="$(check_version_exists "$majorVersion" "$minorVersion")"
+ if [ "$desired_version" = "-1" ]; then
+ Write-PipelineTelemetryError -category "Build" "Could not find specific version of $compiler: $majorVersion $minorVersion."
+ exit 1
+ fi
+ fi
+
+ if [ -z "$CC" ]; then
+ export CC="$(command -v "$compiler$desired_version")"
+ export CXX="$(command -v "$cxxCompiler$desired_version")"
+ if [ -z "$CXX" ]; then export CXX="$(command -v "$cxxCompiler")"; fi
+ fi
+else
+ if [ ! -f "$CLR_CC" ]; then
+ Write-PipelineTelemetryError -category "Build" "CLR_CC is set but path '$CLR_CC' does not exist"
+ exit 1
+ fi
+ export CC="$CLR_CC"
+ export CXX="$CLR_CXX"
+fi
+
+if [ -z "$CC" ]; then
+ Write-PipelineTelemetryError -category "Build" "Unable to find $compiler."
+ exit 1
+fi
+
+export CCC_CC="$CC"
+export CCC_CXX="$CXX"
+export SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version")"
diff --git a/eng/common/native/install-cmake-test.sh b/eng/common/native/install-cmake-test.sh
index 53ddf4e6860..12339a40761 100755
--- a/eng/common/native/install-cmake-test.sh
+++ b/eng/common/native/install-cmake-test.sh
@@ -101,7 +101,7 @@ fi
DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds
if [[ $? != 0 ]]; then
- echo "Installation failed" >&2
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed'
exit 1
fi
@@ -110,7 +110,7 @@ fi
NewScriptShim $shim_path $tool_file_path true
if [[ $? != 0 ]]; then
- echo "Shim generation failed" >&2
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed'
exit 1
fi
diff --git a/eng/common/native/install-cmake.sh b/eng/common/native/install-cmake.sh
index 5f1a182fa9f..18041be8763 100755
--- a/eng/common/native/install-cmake.sh
+++ b/eng/common/native/install-cmake.sh
@@ -101,7 +101,7 @@ fi
DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds
if [[ $? != 0 ]]; then
- echo "Installation failed" >&2
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed'
exit 1
fi
@@ -110,7 +110,7 @@ fi
NewScriptShim $shim_path $tool_file_path true
if [[ $? != 0 ]]; then
- echo "Shim generation failed" >&2
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed'
exit 1
fi
diff --git a/eng/common/native/install-tool.ps1 b/eng/common/native/install-tool.ps1
index 635ab3fd414..f397e1c75d4 100644
--- a/eng/common/native/install-tool.ps1
+++ b/eng/common/native/install-tool.ps1
@@ -46,6 +46,8 @@ Param (
[int] $RetryWaitTimeInSeconds = 30
)
+. $PSScriptRoot\..\pipeline-logging-functions.ps1
+
# Import common library modules
Import-Module -Name (Join-Path $CommonLibraryDirectory "CommonLibrary.psm1")
@@ -93,7 +95,7 @@ try {
-Verbose:$Verbose
if ($InstallStatus -Eq $False) {
- Write-Error "Installation failed"
+ Write-PipelineTelemetryError "Installation failed" -Category "NativeToolsetBootstrapping"
exit 1
}
}
@@ -103,7 +105,7 @@ try {
Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))"
exit 1
} elseif (@($ToolFilePath).Length -Lt 1) {
- Write-Error "$ToolName was not found in $ToolFilePath."
+ Write-Host "$ToolName was not found in $ToolFilePath."
exit 1
}
@@ -117,14 +119,14 @@ try {
-Verbose:$Verbose
if ($GenerateShimStatus -Eq $False) {
- Write-Error "Generate shim failed"
+ Write-PipelineTelemetryError "Generate shim failed" -Category "NativeToolsetBootstrapping"
return 1
}
exit 0
}
catch {
- Write-Host $_
- Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category "NativeToolsetBootstrapping" -Message $_
exit 1
}
diff --git a/eng/common/performance/blazor_perf.proj b/eng/common/performance/blazor_perf.proj
new file mode 100644
index 00000000000..3b25359c438
--- /dev/null
+++ b/eng/common/performance/blazor_perf.proj
@@ -0,0 +1,30 @@
+
+
+ python3
+ $(HelixPreCommands);chmod +x $HELIX_WORKITEM_PAYLOAD/SOD/SizeOnDisk
+
+
+
+
+ %(Identity)
+
+
+
+
+ %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\
+ $(ScenarioDirectory)blazor\
+
+
+ $HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/
+ $(ScenarioDirectory)blazor/
+
+
+
+
+ $(WorkItemDirectory)
+ cd $(BlazorDirectory);$(Python) pre.py publish --msbuild %27/p:_TrimmerDumpDependencies=true%27 --msbuild-static AdditionalMonoLinkerOptions=%27"%24(AdditionalMonoLinkerOptions) --dump-dependencies"%27 --binlog %27./traces/blazor_publish.binlog%27
+ $(Python) test.py sod --scenario-name "%(Identity)"
+ $(Python) post.py
+
+
+
\ No newline at end of file
diff --git a/eng/common/performance/crossgen_perf.proj b/eng/common/performance/crossgen_perf.proj
new file mode 100644
index 00000000000..4264920382e
--- /dev/null
+++ b/eng/common/performance/crossgen_perf.proj
@@ -0,0 +1,69 @@
+
+
+
+
+ %(Identity)
+
+
+
+
+
+ py -3
+ $(HelixPreCommands)
+ %HELIX_CORRELATION_PAYLOAD%\Core_Root
+ %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\
+ $(ScenarioDirectory)crossgen\
+ $(ScenarioDirectory)crossgen2\
+
+
+ python3
+ $(HelixPreCommands);chmod +x $HELIX_WORKITEM_PAYLOAD/startup/Startup;chmod +x $HELIX_WORKITEM_PAYLOAD/startup/perfcollect;sudo apt update
+ $HELIX_CORRELATION_PAYLOAD/Core_Root
+ $HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/
+ $(ScenarioDirectory)crossgen/
+ $(ScenarioDirectory)crossgen2/
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ $(WorkItemDirectory)
+ $(Python) $(CrossgenDirectory)test.py crossgen --core-root $(CoreRoot) --test-name %(Identity)
+
+
+
+
+
+ $(WorkItemDirectory)
+ $(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --single %(Identity)
+
+
+
+
+
+
+ 4:00
+
+
+
+ 4:00
+
+
+ $(WorkItemDirectory)
+ $(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --composite $(Crossgen2Directory)framework-r2r.dll.rsp
+ 1:00
+
+
+
\ No newline at end of file
diff --git a/eng/common/performance/perfhelixpublish.proj b/eng/common/performance/microbenchmarks.proj
similarity index 71%
rename from eng/common/performance/perfhelixpublish.proj
rename to eng/common/performance/microbenchmarks.proj
index e5826b53237..94b6efbc929 100644
--- a/eng/common/performance/perfhelixpublish.proj
+++ b/eng/common/performance/microbenchmarks.proj
@@ -6,7 +6,8 @@
py -3
%HELIX_CORRELATION_PAYLOAD%\Core_Root\CoreRun.exe
%HELIX_CORRELATION_PAYLOAD%\Baseline_Core_Root\CoreRun.exe
- $(HelixPreCommands);call %HELIX_CORRELATION_PAYLOAD%\performance\tools\machine-setup.cmd
+
+ $(HelixPreCommands);call %HELIX_CORRELATION_PAYLOAD%\performance\tools\machine-setup.cmd;set PYTHONPATH=%HELIX_WORKITEM_PAYLOAD%\scripts%3B%HELIX_WORKITEM_PAYLOAD%
%HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts
%HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts_Baseline
%HELIX_CORRELATION_PAYLOAD%\performance\src\tools\ResultsComparer\ResultsComparer.csproj
@@ -40,6 +41,17 @@
$HELIX_WORKITEM_ROOT/testResults.xml
+
+ $(CliArguments) --wasm
+
+
+
+ --corerun %HELIX_CORRELATION_PAYLOAD%\dotnet-mono\shared\Microsoft.NETCore.App\6.0.0\corerun.exe
+
+
+ --corerun $(BaseDirectory)/dotnet-mono/shared/Microsoft.NETCore.App/6.0.0/corerun
+
+
--corerun $(CoreRun)
@@ -55,6 +67,11 @@
$(WorkItemCommand) $(CliArguments)
+
+
+ 2:30
+ 0:15
+
@@ -63,7 +80,7 @@
- 5
+ 30
@@ -71,6 +88,31 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -86,7 +128,7 @@
$(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"
$(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"
$(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults);$(FinalCommand)
- 4:00
+ $(WorkItemTimeout)
diff --git a/eng/common/performance/performance-setup.ps1 b/eng/common/performance/performance-setup.ps1
index ec41965fc89..656c0bd9022 100644
--- a/eng/common/performance/performance-setup.ps1
+++ b/eng/common/performance/performance-setup.ps1
@@ -3,18 +3,22 @@ Param(
[string] $CoreRootDirectory,
[string] $BaselineCoreRootDirectory,
[string] $Architecture="x64",
- [string] $Framework="netcoreapp5.0",
+ [string] $Framework="net5.0",
[string] $CompilationMode="Tiered",
[string] $Repository=$env:BUILD_REPOSITORY_NAME,
[string] $Branch=$env:BUILD_SOURCEBRANCH,
[string] $CommitSha=$env:BUILD_SOURCEVERSION,
[string] $BuildNumber=$env:BUILD_BUILDNUMBER,
- [string] $RunCategories="coreclr corefx",
+ [string] $RunCategories="Libraries Runtime",
[string] $Csproj="src\benchmarks\micro\MicroBenchmarks.csproj",
[string] $Kind="micro",
+ [switch] $LLVM,
+ [switch] $MonoInterpreter,
+ [switch] $MonoAOT,
[switch] $Internal,
[switch] $Compare,
- [string] $Configurations="CompilationMode=$CompilationMode"
+ [string] $MonoDotnet="",
+ [string] $Configurations="CompilationMode=$CompilationMode RunKind=$Kind"
)
$RunFromPerformanceRepo = ($Repository -eq "dotnet/performance") -or ($Repository -eq "dotnet-performance")
@@ -31,7 +35,8 @@ $HelixSourcePrefix = "pr"
$Queue = "Windows.10.Amd64.ClientRS4.DevEx.15.8.Open"
-if ($Framework.StartsWith("netcoreapp")) {
+# TODO: Implement a better logic to determine if Framework is .NET Core or >= .NET 5.
+if ($Framework.StartsWith("netcoreapp") -or ($Framework -eq "net5.0")) {
$Queue = "Windows.10.Amd64.ClientRS5.Open"
}
@@ -49,9 +54,37 @@ if ($Internal) {
$HelixSourcePrefix = "official"
}
-$CommonSetupArguments="--frameworks $Framework --queue $Queue --build-number $BuildNumber --build-configs $Configurations"
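+# Adjust BenchmarkDotNet arguments and build configurations for mono-based runs (interpreter, LLVM, AOT).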
+if($MonoInterpreter)
+{
+ $ExtraBenchmarkDotNetArguments = "--category-exclusion-filter NoInterpreter"
+}
+
+if($MonoDotnet -ne "")
+{
+ $Configurations += " LLVM=$LLVM MonoInterpreter=$MonoInterpreter MonoAOT=$MonoAOT"
+ if($ExtraBenchmarkDotNetArguments -eq "")
+ {
+ #FIX ME: We need to block these tests as they don't run on mono for now
+ $ExtraBenchmarkDotNetArguments = "--exclusion-filter *Perf_Image* *Perf_NamedPipeStream*"
+ }
+ else
+ {
+ #FIX ME: We need to block these tests as they don't run on mono for now
+ $ExtraBenchmarkDotNetArguments += " --exclusion-filter *Perf_Image* *Perf_NamedPipeStream*"
+ }
+}
+
+# FIX ME: This is a workaround until we get this from the actual pipeline
+$CommonSetupArguments="--channel master --queue $Queue --build-number $BuildNumber --build-configs $Configurations --architecture $Architecture"
$SetupArguments = "--repository https://github.com/$Repository --branch $Branch --get-perf-hash --commit-sha $CommitSha $CommonSetupArguments"
+
+#This grabs the LKG version number of dotnet and passes it to our scripts
+$VersionJSON = Get-Content global.json | ConvertFrom-Json
+$DotNetVersion = $VersionJSON.tools.dotnet
+$SetupArguments = "--dotnet-versions $DotNetVersion $SetupArguments"
+
+
if ($RunFromPerformanceRepo) {
$SetupArguments = "--perf-hash $CommitSha $CommonSetupArguments"
@@ -61,6 +94,13 @@ else {
git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $PerformanceDirectory
}
+if($MonoDotnet -ne "")
+{
+ $UsingMono = "true"
+ $MonoDotnetPath = (Join-Path $PayloadDirectory "dotnet-mono")
+ Move-Item -Path $MonoDotnet -Destination $MonoDotnetPath
+}
+
if ($UseCoreRun) {
$NewCoreRoot = (Join-Path $PayloadDirectory "Core_Root")
Move-Item -Path $CoreRootDirectory -Destination $NewCoreRoot
@@ -96,6 +136,7 @@ Write-PipelineSetVariable -Name 'UseCoreRun' -Value "$UseCoreRun" -IsMultiJobVar
Write-PipelineSetVariable -Name 'UseBaselineCoreRun' -Value "$UseBaselineCoreRun" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'RunFromPerfRepo' -Value "$RunFromPerformanceRepo" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'Compare' -Value "$Compare" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'MonoDotnet' -Value "$UsingMono" -IsMultiJobVariable $false
# Helix Arguments
Write-PipelineSetVariable -Name 'Creator' -Value "$Creator" -IsMultiJobVariable $false
diff --git a/eng/common/performance/performance-setup.sh b/eng/common/performance/performance-setup.sh
index 2f2092166e4..99d1b7bc1fc 100755
--- a/eng/common/performance/performance-setup.sh
+++ b/eng/common/performance/performance-setup.sh
@@ -4,7 +4,7 @@ source_directory=$BUILD_SOURCESDIRECTORY
core_root_directory=
baseline_core_root_directory=
architecture=x64
-framework=netcoreapp5.0
+framework=net5.0
compilation_mode=tiered
repository=$BUILD_REPOSITORY_NAME
branch=$BUILD_SOURCEBRANCH
@@ -12,13 +12,21 @@ commit_sha=$BUILD_SOURCEVERSION
build_number=$BUILD_BUILDNUMBER
internal=false
compare=false
+mono_dotnet=
kind="micro"
-run_categories="coreclr corefx"
+llvm=false
+monointerpreter=false
+monoaot=false
+run_categories="Libraries Runtime"
csproj="src\benchmarks\micro\MicroBenchmarks.csproj"
-configurations=
+configurations="CompilationMode=$compilation_mode RunKind=$kind"
run_from_perf_repo=false
use_core_run=true
use_baseline_core_run=true
+using_mono=false
+wasm_runtime_loc=
+using_wasm=false
+use_latest_dotnet=false
while (($# > 0)); do
lowerI="$(echo $1 | awk '{print tolower($0)}')"
@@ -65,6 +73,7 @@ while (($# > 0)); do
;;
--kind)
kind=$2
+ configurations="CompilationMode=$compilation_mode RunKind=$kind"
shift 2
;;
--runcategories)
@@ -79,6 +88,26 @@ while (($# > 0)); do
internal=true
shift 1
;;
+ --llvm)
+ llvm=true
+ shift 1
+ ;;
+ --monointerpreter)
+ monointerpreter=true
+ shift 1
+ ;;
+ --monoaot)
+ monoaot=true
+ shift 1
+ ;;
+ --monodotnet)
+ mono_dotnet=$2
+ shift 2
+ ;;
+ --wasm)
+ wasm_runtime_loc=$2
+ shift 2
+ ;;
--compare)
compare=true
shift 1
@@ -87,7 +116,11 @@ while (($# > 0)); do
configurations=$2
shift 2
;;
- --help)
+ --latestdotnet)
+ use_latest_dotnet=true
+ shift 1
+ ;;
+ *)
echo "Common settings:"
echo " --corerootdirectory Directory where Core_Root exists, if running perf testing with --corerun"
echo " --architecture Architecture of the testing being run"
@@ -107,6 +140,9 @@ while (($# > 0)); do
echo " --kind Related to csproj. The kind of benchmarks that should be run. Defaults to micro"
echo " --runcategories Related to csproj. Categories of benchmarks to run. Defaults to \"coreclr corefx\""
echo " --internal If the benchmarks are running as an official job."
+ echo " --monodotnet Pass the path to the mono dotnet for mono performance testing."
+ echo " --wasm Path to the unpacked wasm runtime pack."
+ echo " --latestdotnet --dotnet-versions will not be specified. --dotnet-versions defaults to LKG version in global.json "
echo ""
exit 0
;;
@@ -118,7 +154,7 @@ if [ "$repository" == "dotnet/performance" ] || [ "$repository" == "dotnet-perfo
fi
if [ -z "$configurations" ]; then
- configurations="CompliationMode=$compilation_mode"
+ configurations="CompilationMode=$compilation_mode"
fi
if [ -z "$core_root_directory" ]; then
@@ -164,9 +200,31 @@ if [[ "$internal" == true ]]; then
fi
fi
-common_setup_arguments="--frameworks $framework --queue $queue --build-number $build_number --build-configs $configurations"
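+# Adjust category-exclusion filters and build configs for mono and wasm runs.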
+if [[ "$mono_dotnet" != "" ]] && [[ "$monointerpreter" == "false" ]]; then
+ extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoMono"
+fi
+
+if [[ "$wasm_runtime_loc" != "" ]]; then
+ configurations="CompilationMode=wasm RunKind=$kind"
+ extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoWASM NoMono"
+fi
+
+if [[ "$mono_dotnet" != "" ]] && [[ "$monointerpreter" == "true" ]]; then
+ configurations="$configurations LLVM=$llvm MonoInterpreter=$monointerpreter MonoAOT=$monoaot"
+ extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoMono"
+fi
+
+common_setup_arguments="--channel master --queue $queue --build-number $build_number --build-configs $configurations --architecture $architecture"
setup_arguments="--repository https://github.com/$repository --branch $branch --get-perf-hash --commit-sha $commit_sha $common_setup_arguments"
+
+if [[ "$use_latest_dotnet" = false ]]; then
+ # Get the tools section from the global.json.
+ # This grabs the LKG version number of dotnet and passes it to our scripts
+ dotnet_version=`cat global.json | python3 -c 'import json,sys;obj=json.load(sys.stdin);print(obj["tools"]["dotnet"])'`
+ setup_arguments="--dotnet-versions $dotnet_version $setup_arguments"
+fi
+
if [[ "$run_from_perf_repo" = true ]]; then
payload_directory=
workitem_directory=$source_directory
@@ -179,6 +237,19 @@ else
mv $docs_directory $workitem_directory
fi
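+# Stage the provided wasm runtime pack into the Helix payload and point BenchmarkDotNet at it.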
+if [[ "$wasm_runtime_loc" != "" ]]; then
+ using_wasm=true
+ wasm_dotnet_path=$payload_directory/dotnet-wasm
+ mv $wasm_runtime_loc $wasm_dotnet_path
+ extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --wasmMainJS \$HELIX_CORRELATION_PAYLOAD/dotnet-wasm/runtime-test.js --wasmEngine /home/helixbot/.jsvu/v8 --customRuntimePack \$HELIX_CORRELATION_PAYLOAD/dotnet-wasm"
+fi
+
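+# Likewise, stage the provided mono dotnet under the payload as dotnet-mono.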
+if [[ "$mono_dotnet" != "" ]]; then
+ using_mono=true
+ mono_dotnet_path=$payload_directory/dotnet-mono
+ mv $mono_dotnet $mono_dotnet_path
+fi
+
if [[ "$use_core_run" = true ]]; then
new_core_root=$payload_directory/Core_Root
mv $core_root_directory $new_core_root
@@ -203,7 +274,7 @@ Write-PipelineSetVariable -name "PerformanceDirectory" -value "$performance_dire
Write-PipelineSetVariable -name "WorkItemDirectory" -value "$workitem_directory" -is_multi_job_variable false
Write-PipelineSetVariable -name "Queue" -value "$queue" -is_multi_job_variable false
Write-PipelineSetVariable -name "SetupArguments" -value "$setup_arguments" -is_multi_job_variable false
-Write-PipelineSetVariable -name "Python" -value "$python3" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Python" -value "python3" -is_multi_job_variable false
Write-PipelineSetVariable -name "PerfLabArguments" -value "$perflab_arguments" -is_multi_job_variable false
Write-PipelineSetVariable -name "ExtraBenchmarkDotNetArguments" -value "$extra_benchmark_dotnet_arguments" -is_multi_job_variable false
Write-PipelineSetVariable -name "BDNCategories" -value "$run_categories" -is_multi_job_variable false
@@ -214,3 +285,5 @@ Write-PipelineSetVariable -name "HelixSourcePrefix" -value "$helix_source_prefix
Write-PipelineSetVariable -name "Kind" -value "$kind" -is_multi_job_variable false
Write-PipelineSetVariable -name "_BuildConfig" -value "$architecture.$kind.$framework" -is_multi_job_variable false
Write-PipelineSetVariable -name "Compare" -value "$compare" -is_multi_job_variable false
+Write-PipelineSetVariable -name "MonoDotnet" -value "$using_mono" -is_multi_job_variable false
+Write-PipelineSetVariable -name "WasmDotnet" -value "$using_wasm" -is_multi_job_variable false
diff --git a/eng/common/pipeline-logging-functions.ps1 b/eng/common/pipeline-logging-functions.ps1
index af5f48aaceb..8484451f3a5 100644
--- a/eng/common/pipeline-logging-functions.ps1
+++ b/eng/common/pipeline-logging-functions.ps1
@@ -12,6 +12,7 @@ $script:loggingCommandEscapeMappings = @( # TODO: WHAT ABOUT "="? WHAT ABOUT "%"
# TODO: BUG: Escape % ???
# TODO: Add test to verify don't need to escape "=".
+# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set
function Write-PipelineTelemetryError {
[CmdletBinding()]
param(
@@ -25,49 +26,55 @@ function Write-PipelineTelemetryError {
[string]$SourcePath,
[string]$LineNumber,
[string]$ColumnNumber,
- [switch]$AsOutput)
-
- $PSBoundParameters.Remove("Category") | Out-Null
+ [switch]$AsOutput,
+ [switch]$Force)
- $Message = "(NETCORE_ENGINEERING_TELEMETRY=$Category) $Message"
- $PSBoundParameters.Remove("Message") | Out-Null
- $PSBoundParameters.Add("Message", $Message)
+ $PSBoundParameters.Remove('Category') | Out-Null
+ if($Force -Or ((Test-Path variable:ci) -And $ci)) {
+ $Message = "(NETCORE_ENGINEERING_TELEMETRY=$Category) $Message"
+ }
+ $PSBoundParameters.Remove('Message') | Out-Null
+ $PSBoundParameters.Add('Message', $Message)
Write-PipelineTaskError @PSBoundParameters
}
+# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set
function Write-PipelineTaskError {
[CmdletBinding()]
param(
- [Parameter(Mandatory = $true)]
- [string]$Message,
- [Parameter(Mandatory = $false)]
- [string]$Type = 'error',
- [string]$ErrCode,
- [string]$SourcePath,
- [string]$LineNumber,
- [string]$ColumnNumber,
- [switch]$AsOutput)
-
- if(!$ci) {
+ [Parameter(Mandatory = $true)]
+ [string]$Message,
+ [Parameter(Mandatory = $false)]
+ [string]$Type = 'error',
+ [string]$ErrCode,
+ [string]$SourcePath,
+ [string]$LineNumber,
+ [string]$ColumnNumber,
+ [switch]$AsOutput,
+ [switch]$Force
+ )
+
+ if(!$Force -And (-Not (Test-Path variable:ci) -Or !$ci)) {
if($Type -eq 'error') {
- Write-Host $Message -ForegroundColor Red
- return
+ Write-Host $Message -ForegroundColor Red
+ return
}
elseif ($Type -eq 'warning') {
- Write-Host $Message -ForegroundColor Yellow
- return
+ Write-Host $Message -ForegroundColor Yellow
+ return
}
- }
-
- if(($Type -ne 'error') -and ($Type -ne 'warning')) {
+ }
+
+ if(($Type -ne 'error') -and ($Type -ne 'warning')) {
Write-Host $Message
return
- }
- if(-not $PSBoundParameters.ContainsKey('Type')) {
+ }
+ $PSBoundParameters.Remove('Force') | Out-Null
+ if(-not $PSBoundParameters.ContainsKey('Type')) {
$PSBoundParameters.Add('Type', 'error')
- }
- Write-LogIssue @PSBoundParameters
+ }
+ Write-LogIssue @PSBoundParameters
}
function Write-PipelineSetVariable {
@@ -80,7 +87,7 @@ function Write-PipelineTaskError {
[switch]$AsOutput,
[bool]$IsMultiJobVariable=$true)
- if($ci) {
+ if((Test-Path variable:ci) -And $ci) {
Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $Value -Properties @{
'variable' = $Name
'isSecret' = $Secret
@@ -95,7 +102,8 @@ function Write-PipelineTaskError {
[Parameter(Mandatory=$true)]
[string]$Path,
[switch]$AsOutput)
- if($ci) {
+
+ if((Test-Path variable:ci) -And $ci) {
Write-LoggingCommand -Area 'task' -Event 'prependpath' -Data $Path -AsOutput:$AsOutput
}
}
@@ -231,4 +239,4 @@ function Write-LogIssue {
}
Write-Host $command -ForegroundColor $foregroundColor -BackgroundColor $backgroundColor
-}
\ No newline at end of file
+}
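
A note on the logging changes above: every `$ci` check is now guarded with `Test-Path variable:ci`, so the helpers can be dot-sourced by scripts that never define `$ci`, and the new `-Force` switch emits the `##vso` logging command even when `$ci` is false or unset. A minimal usage sketch (the dot-source path and category name are illustrative, not taken from this repo):

. "$PSScriptRoot\eng\common\pipeline-logging-functions.ps1"   # assumed location relative to the repo root

# With $ci unset and no -Force, this only writes a red console message.
Write-PipelineTelemetryError -Category 'LocalBuild' -Message 'Restore failed.'

# With -Force, the ##vso[task.logissue ...] command is emitted anyway, so Azure Pipelines
# still records the error when the script runs outside the usual CI entry points.
Write-PipelineTelemetryError -Force -Category 'LocalBuild' -Message 'Restore failed.'
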
diff --git a/eng/common/pipeline-logging-functions.sh b/eng/common/pipeline-logging-functions.sh
index 1c560a50613..6cd0a3400e6 100755
--- a/eng/common/pipeline-logging-functions.sh
+++ b/eng/common/pipeline-logging-functions.sh
@@ -2,6 +2,7 @@
function Write-PipelineTelemetryError {
local telemetry_category=''
+ local force=false
local function_args=()
local message=''
while [[ $# -gt 0 ]]; do
@@ -11,6 +12,9 @@ function Write-PipelineTelemetryError {
telemetry_category=$2
shift
;;
+ -force|-f)
+ force=true
+ ;;
-*)
function_args+=("$1 $2")
shift
@@ -22,28 +26,26 @@ function Write-PipelineTelemetryError {
shift
done
- if [[ "$ci" != true ]]; then
+ if [[ $force != true ]] && [[ "$ci" != true ]]; then
echo "$message" >&2
return
fi
+ if [[ $force == true ]]; then
+ function_args+=("-force")
+ fi
message="(NETCORE_ENGINEERING_TELEMETRY=$telemetry_category) $message"
function_args+=("$message")
-
- Write-PipelineTaskError $function_args
+ Write-PipelineTaskError ${function_args[@]}
}
function Write-PipelineTaskError {
- if [[ "$ci" != true ]]; then
- echo "$@" >&2
- return
- fi
-
local message_type="error"
local sourcepath=''
local linenumber=''
local columnnumber=''
local error_code=''
+ local force=false
while [[ $# -gt 0 ]]; do
opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
@@ -68,6 +70,9 @@ function Write-PipelineTaskError {
error_code=$2
shift
;;
+ -force|-f)
+ force=true
+ ;;
*)
break
;;
@@ -76,6 +81,11 @@ function Write-PipelineTaskError {
shift
done
+ if [[ $force != true ]] && [[ "$ci" != true ]]; then
+ echo "$@" >&2
+ return
+ fi
+
local message="##vso[task.logissue"
message="$message type=$message_type"
diff --git a/eng/common/post-build/promote-build.ps1 b/eng/common/post-build/add-build-to-channel.ps1
similarity index 55%
rename from eng/common/post-build/promote-build.ps1
rename to eng/common/post-build/add-build-to-channel.ps1
index e5ae85f2517..de2d957922a 100644
--- a/eng/common/post-build/promote-build.ps1
+++ b/eng/common/post-build/add-build-to-channel.ps1
@@ -2,26 +2,26 @@ param(
[Parameter(Mandatory=$true)][int] $BuildId,
[Parameter(Mandatory=$true)][int] $ChannelId,
[Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
- [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = "https://maestro-prod.westus2.cloudapp.azure.com",
- [Parameter(Mandatory=$false)][string] $MaestroApiVersion = "2019-01-16"
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
+ [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
)
-. $PSScriptRoot\post-build-utils.ps1
-
try {
+ . $PSScriptRoot\post-build-utils.ps1
+
  # Check that the channel we are going to promote the build to exists
$channelInfo = Get-MaestroChannel -ChannelId $ChannelId
if (!$channelInfo) {
- Write-Host "Channel with BAR ID $ChannelId was not found in BAR!"
+    Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "Channel with BAR ID $ChannelId was not found in BAR!"
ExitWithExitCode 1
}
- # Get info about which channels the build has already been promoted to
+ # Get info about which channel(s) the build has already been promoted to
$buildInfo = Get-MaestroBuild -BuildId $BuildId
if (!$buildInfo) {
- Write-Host "Build with BAR ID $BuildId was not found in BAR!"
+ Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "Build with BAR ID $BuildId was not found in BAR!"
ExitWithExitCode 1
}
@@ -39,10 +39,10 @@ try {
Assign-BuildToChannel -BuildId $BuildId -ChannelId $ChannelId
- Write-Host "done."
+ Write-Host 'done.'
}
catch {
- Write-Host "There was an error while trying to promote build '$BuildId' to channel '$ChannelId'"
Write-Host $_
- Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to promote build '$BuildId' to channel '$ChannelId'"
+ ExitWithExitCode 1
}
diff --git a/eng/common/post-build/check-channel-consistency.ps1 b/eng/common/post-build/check-channel-consistency.ps1
new file mode 100644
index 00000000000..63f3464c986
--- /dev/null
+++ b/eng/common/post-build/check-channel-consistency.ps1
@@ -0,0 +1,40 @@
+param(
+ [Parameter(Mandatory=$true)][string] $PromoteToChannels, # List of channels that the build should be promoted to
+ [Parameter(Mandatory=$true)][array] $AvailableChannelIds # List of channel IDs available in the YAML implementation
+)
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ if ($PromoteToChannels -eq "") {
+    Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended, use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info."
+ ExitWithExitCode 0
+ }
+
+  # Check that every channel Maestro says the build should be promoted to
+  # is present in the post-build YAML configuration
+ $PromoteToChannelsIds = $PromoteToChannels -split "\D" | Where-Object { $_ }
+
+ $hasErrors = $false
+
+ foreach ($id in $PromoteToChannelsIds) {
+ if (($id -ne 0) -and ($id -notin $AvailableChannelIds)) {
+ Write-PipelineTaskError -Message "Channel $id is not present in the post-build YAML configuration! This is an error scenario. Please contact @dnceng."
+ $hasErrors = $true
+ }
+ }
+
+  # `Write-PipelineTaskError` doesn't fail the script, and the loop above may have reported
+  # several errors. The check below makes sure the script returns an error state if any
+  # validation error was reported.
+ if ($hasErrors) {
+ ExitWithExitCode 1
+ }
+
+ Write-Host 'done.'
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Category 'CheckChannelConsistency' -Message "There was an error while trying to check consistency of Maestro default channels for the build and post-build YAML configuration."
+ ExitWithExitCode 1
+}
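
The new script derives channel IDs by splitting `$PromoteToChannels` on non-digit characters and discarding empty entries before comparing them against the YAML-configured list. A small sketch of that parsing step, with made-up values:

# Hypothetical value reported by Maestro for a build assigned to channels 131 and 529.
$PromoteToChannels = '131-529'

# Split on non-digits and drop empty strings, leaving only the IDs.
$PromoteToChannelsIds = $PromoteToChannels -split '\D' | Where-Object { $_ }   # -> '131', '529'

# Flag any ID (other than 0) that the post-build YAML doesn't know about.
$AvailableChannelIds = @(131, 529, 1296)   # illustrative values
foreach ($id in $PromoteToChannelsIds) {
  if (($id -ne 0) -and ($id -notin $AvailableChannelIds)) {
    Write-Host "Channel $id is not present in the post-build YAML configuration."
  }
}
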
diff --git a/eng/common/post-build/darc-gather-drop.ps1 b/eng/common/post-build/darc-gather-drop.ps1
deleted file mode 100644
index 89854d3c1c2..00000000000
--- a/eng/common/post-build/darc-gather-drop.ps1
+++ /dev/null
@@ -1,45 +0,0 @@
-param(
- [Parameter(Mandatory=$true)][int] $BarBuildId, # ID of the build which assets should be downloaded
- [Parameter(Mandatory=$true)][string] $DropLocation, # Where the assets should be downloaded to
- [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken, # Token used to access Maestro API
- [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = "https://maestro-prod.westus2.cloudapp.azure.com", # Maestro API URL
- [Parameter(Mandatory=$false)][string] $MaestroApiVersion = "2019-01-16" # Version of Maestro API to use
-)
-
-. $PSScriptRoot\post-build-utils.ps1
-
-try {
- Write-Host "Installing DARC ..."
-
- . $PSScriptRoot\..\darc-init.ps1
- $exitCode = $LASTEXITCODE
-
- if ($exitCode -ne 0) {
- Write-PipelineTaskError "Something failed while running 'darc-init.ps1'. Check for errors above. Exiting now..."
- ExitWithExitCode $exitCode
- }
-
- # For now, only use a dry run.
- # Ideally we would change darc to enable a quick request that
- # would check whether the file exists that you can download it,
- # and that it won't conflict with other files.
- # https://github.com/dotnet/arcade/issues/3674
- # Right now we can't remove continue-on-error because we ocassionally will have
- # dependencies that have no associated builds (e.g. an old dependency).
- # We need to add an option to baseline specific dependencies away, or add them manually
- # to the BAR.
- darc gather-drop --non-shipping `
- --dry-run `
- --continue-on-error `
- --id $BarBuildId `
- --output-dir $DropLocation `
- --bar-uri $MaestroApiEndpoint `
- --password $MaestroApiAccessToken `
- --latest-location
-}
-catch {
- Write-Host $_
- Write-Host $_.Exception
- Write-Host $_.ScriptStackTrace
- ExitWithExitCode 1
-}
diff --git a/eng/common/post-build/nuget-validation.ps1 b/eng/common/post-build/nuget-validation.ps1
index 78ed0d540f5..dab3534ab53 100644
--- a/eng/common/post-build/nuget-validation.ps1
+++ b/eng/common/post-build/nuget-validation.ps1
@@ -6,20 +6,19 @@ param(
[Parameter(Mandatory=$true)][string] $ToolDestinationPath # Where the validation tool should be downloaded to
)
-. $PSScriptRoot\post-build-utils.ps1
-
try {
- $url = "https://raw.githubusercontent.com/NuGet/NuGetGallery/jver-verify/src/VerifyMicrosoftPackage/verify.ps1"
+ . $PSScriptRoot\post-build-utils.ps1
+
+ $url = 'https://raw.githubusercontent.com/NuGet/NuGetGallery/3e25ad135146676bcab0050a516939d9958bfa5d/src/VerifyMicrosoftPackage/verify.ps1'
- New-Item -ItemType "directory" -Path ${ToolDestinationPath} -Force
+ New-Item -ItemType 'directory' -Path ${ToolDestinationPath} -Force
Invoke-WebRequest $url -OutFile ${ToolDestinationPath}\verify.ps1
& ${ToolDestinationPath}\verify.ps1 ${PackagesPath}\*.nupkg
}
catch {
- Write-PipelineTaskError "NuGet package validation failed. Please check error logs."
- Write-Host $_
Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'NuGetValidation' -Message $_
ExitWithExitCode 1
}
diff --git a/eng/common/post-build/post-build-utils.ps1 b/eng/common/post-build/post-build-utils.ps1
index 551ae113f89..7d49744795f 100644
--- a/eng/common/post-build/post-build-utils.ps1
+++ b/eng/common/post-build/post-build-utils.ps1
@@ -1,16 +1,17 @@
# Most of the functions in this file require the variables `MaestroApiEndPoint`,
# `MaestroApiVersion` and `MaestroApiAccessToken` to be globally available.
-$ErrorActionPreference = "Stop"
+$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
# `tools.ps1` checks $ci to perform some actions. Since the post-build
# scripts don't necessarily execute in the same agent that run the
# build.ps1/sh script this variable isn't automatically set.
$ci = $true
+$disableConfigureToolsetImport = $true
. $PSScriptRoot\..\tools.ps1
-function Create-MaestroApiRequestHeaders([string]$ContentType = "application/json") {
+function Create-MaestroApiRequestHeaders([string]$ContentType = 'application/json') {
Validate-MaestroVars
$headers = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
@@ -50,20 +51,20 @@ function Get-MaestroSubscriptions([string]$SourceRepository, [int]$ChannelId) {
return $result
}
-function Trigger-Subscription([string]$SubscriptionId) {
+function Assign-BuildToChannel([int]$BuildId, [int]$ChannelId) {
Validate-MaestroVars
$apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
- $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions/$SubscriptionId/trigger?api-version=$MaestroApiVersion"
- Invoke-WebRequest -Uri $apiEndpoint -Headers $apiHeaders -Method Post | Out-Null
+ $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}/builds/${BuildId}?api-version=$MaestroApiVersion"
+ Invoke-WebRequest -Method Post -Uri $apiEndpoint -Headers $apiHeaders | Out-Null
}
-function Assign-BuildToChannel([int]$BuildId, [int]$ChannelId) {
+function Trigger-Subscription([string]$SubscriptionId) {
Validate-MaestroVars
$apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
- $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}/builds/${BuildId}?api-version=$MaestroApiVersion"
- Invoke-WebRequest -Method Post -Uri $apiEndpoint -Headers $apiHeaders | Out-Null
+ $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions/$SubscriptionId/trigger?api-version=$MaestroApiVersion"
+ Invoke-WebRequest -Uri $apiEndpoint -Headers $apiHeaders -Method Post | Out-Null
}
function Validate-MaestroVars {
@@ -72,18 +73,18 @@ function Validate-MaestroVars {
Get-Variable MaestroApiVersion -Scope Global | Out-Null
Get-Variable MaestroApiAccessToken -Scope Global | Out-Null
- if (!($MaestroApiEndPoint -Match "^http[s]?://maestro-(int|prod).westus2.cloudapp.azure.com$")) {
- Write-PipelineTaskError "MaestroApiEndPoint is not a valid Maestro URL. '$MaestroApiEndPoint'"
+ if (!($MaestroApiEndPoint -Match '^http[s]?://maestro-(int|prod).westus2.cloudapp.azure.com$')) {
+ Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiEndPoint is not a valid Maestro URL. '$MaestroApiEndPoint'"
ExitWithExitCode 1
}
- if (!($MaestroApiVersion -Match "^[0-9]{4}-[0-9]{2}-[0-9]{2}$")) {
- Write-PipelineTaskError "MaestroApiVersion does not match a version string in the format yyyy-MM-DD. '$MaestroApiVersion'"
+ if (!($MaestroApiVersion -Match '^[0-9]{4}-[0-9]{2}-[0-9]{2}$')) {
+ Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiVersion does not match a version string in the format yyyy-MM-DD. '$MaestroApiVersion'"
ExitWithExitCode 1
}
}
catch {
- Write-PipelineTaskError "Error: Variables `MaestroApiEndPoint`, `MaestroApiVersion` and `MaestroApiAccessToken` are required while using this script."
+ Write-PipelineTelemetryError -Category 'MaestroVars' -Message 'Error: Variables `MaestroApiEndPoint`, `MaestroApiVersion` and `MaestroApiAccessToken` are required while using this script.'
Write-Host $_
ExitWithExitCode 1
}
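
All of these helpers call `Validate-MaestroVars`, so a caller must define the Maestro globals before dot-sourcing the file; the post-build scripts above do that through their parameters. A rough usage sketch with illustrative build and channel IDs and an assumed token variable:

$global:MaestroApiEndPoint    = 'https://maestro-prod.westus2.cloudapp.azure.com'
$global:MaestroApiVersion     = '2019-01-16'
$global:MaestroApiAccessToken = $env:MAESTRO_TOKEN   # hypothetical environment variable

. "$PSScriptRoot\post-build-utils.ps1"

# Promote a build to a channel, then trigger its 'everyBuild' subscriptions.
Assign-BuildToChannel -BuildId 12345 -ChannelId 131
foreach ($sub in (Get-MaestroSubscriptions -SourceRepository 'dotnet/msbuild' -ChannelId 131)) {
  if ($sub.enabled -and $sub.policy.updateFrequency -like 'everyBuild') {
    Trigger-Subscription -SubscriptionId $sub.id
  }
}
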
diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1
new file mode 100644
index 00000000000..650b13b089b
--- /dev/null
+++ b/eng/common/post-build/publish-using-darc.ps1
@@ -0,0 +1,74 @@
+param(
+ [Parameter(Mandatory=$true)][int] $BuildId,
+ [Parameter(Mandatory=$true)][int] $PublishingInfraVersion,
+ [Parameter(Mandatory=$true)][string] $AzdoToken,
+ [Parameter(Mandatory=$true)][string] $MaestroToken,
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
+ [Parameter(Mandatory=$true)][string] $WaitPublishingFinish,
+ [Parameter(Mandatory=$false)][string] $EnableSourceLinkValidation,
+ [Parameter(Mandatory=$false)][string] $EnableSigningValidation,
+ [Parameter(Mandatory=$false)][string] $EnableNugetValidation,
+ [Parameter(Mandatory=$false)][string] $PublishInstallersAndChecksums,
+ [Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters,
+ [Parameter(Mandatory=$false)][string] $SigningValidationAdditionalParameters
+)
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+  # Hard-code the darc version until the next arcade-services rollout, because this version has the required API changes for darc add-build-to-channel
+ $darc = Get-Darc "1.1.0-beta.20418.1"
+
+ $optionalParams = [System.Collections.ArrayList]::new()
+
+ if ("" -ne $ArtifactsPublishingAdditionalParameters) {
+ $optionalParams.Add("artifact-publishing-parameters") | Out-Null
+ $optionalParams.Add($ArtifactsPublishingAdditionalParameters) | Out-Null
+ }
+
+ if ("false" -eq $WaitPublishingFinish) {
+ $optionalParams.Add("--no-wait") | Out-Null
+ }
+
+ if ("false" -ne $PublishInstallersAndChecksums) {
+ $optionalParams.Add("--publish-installers-and-checksums") | Out-Null
+ }
+
+ if ("true" -eq $EnableNugetValidation) {
+ $optionalParams.Add("--validate-nuget") | Out-Null
+ }
+
+ if ("true" -eq $EnableSourceLinkValidation) {
+ $optionalParams.Add("--validate-sourcelinkchecksums") | Out-Null
+ }
+
+ if ("true" -eq $EnableSigningValidation) {
+ $optionalParams.Add("--validate-signingchecksums") | Out-Null
+
+ if ("" -ne $SigningValidationAdditionalParameters) {
+ $optionalParams.Add("--signing-validation-parameters") | Out-Null
+ $optionalParams.Add($SigningValidationAdditionalParameters) | Out-Null
+ }
+ }
+
+ & $darc add-build-to-channel `
+ --id $buildId `
+ --publishing-infra-version $PublishingInfraVersion `
+ --default-channels `
+ --source-branch master `
+ --azdev-pat $AzdoToken `
+ --bar-uri $MaestroApiEndPoint `
+ --password $MaestroToken `
+ @optionalParams
+
+ if ($LastExitCode -ne 0) {
+ Write-Host "Problems using Darc to promote build ${buildId} to default channels. Stopping execution..."
+ exit 1
+ }
+
+ Write-Host 'done.'
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to publish build '$BuildId' to default channels."
+ ExitWithExitCode 1
+}
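
The script collects its optional darc switches in an `ArrayList` and splats the array onto the native command with `@optionalParams`, so each element becomes a separate argument. A stripped-down sketch of the same pattern; the condition and the exact flags shown are illustrative only:

$optionalParams = [System.Collections.ArrayList]::new()

if ('false' -eq $env:WAIT_PUBLISHING_FINISH) {    # hypothetical condition
  $optionalParams.Add('--no-wait') | Out-Null     # Out-Null swallows ArrayList.Add's index return value
}
$optionalParams.Add('--validate-nuget') | Out-Null

# Splatting the array appends its elements as individual arguments, so the effective
# command line is: darc add-build-to-channel --id 12345 --no-wait --validate-nuget
& darc add-build-to-channel --id 12345 @optionalParams
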
diff --git a/eng/common/post-build/setup-maestro-vars.ps1 b/eng/common/post-build/setup-maestro-vars.ps1
deleted file mode 100644
index d7f64dc63cb..00000000000
--- a/eng/common/post-build/setup-maestro-vars.ps1
+++ /dev/null
@@ -1,26 +0,0 @@
-param(
- [Parameter(Mandatory=$true)][string] $ReleaseConfigsPath # Full path to ReleaseConfigs.txt asset
-)
-
-. $PSScriptRoot\post-build-utils.ps1
-
-try {
- $Content = Get-Content $ReleaseConfigsPath
-
- $BarId = $Content | Select -Index 0
-
- $Channels = ""
- $Content | Select -Index 1 | ForEach-Object { $Channels += "$_ ," }
-
- $IsStableBuild = $Content | Select -Index 2
-
- Write-PipelineSetVariable -Name 'BARBuildId' -Value $BarId
- Write-PipelineSetVariable -Name 'InitialChannels' -Value "$Channels"
- Write-PipelineSetVariable -Name 'IsStableBuild' -Value $IsStableBuild
-}
-catch {
- Write-Host $_
- Write-Host $_.Exception
- Write-Host $_.ScriptStackTrace
- ExitWithExitCode 1
-}
diff --git a/eng/common/post-build/sourcelink-validation.ps1 b/eng/common/post-build/sourcelink-validation.ps1
index bbfdacca130..c7e7ae67d81 100644
--- a/eng/common/post-build/sourcelink-validation.ps1
+++ b/eng/common/post-build/sourcelink-validation.ps1
@@ -34,9 +34,9 @@ $ValidatePackage = {
# Extensions for which we'll look for SourceLink information
# For now we'll only care about Portable & Embedded PDBs
- $RelevantExtensions = @(".dll", ".exe", ".pdb")
+ $RelevantExtensions = @('.dll', '.exe', '.pdb')
- Write-Host -NoNewLine "Validating" ([System.IO.Path]::GetFileName($PackagePath)) "... "
+ Write-Host -NoNewLine 'Validating ' ([System.IO.Path]::GetFileName($PackagePath)) '...'
$PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
$ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
@@ -58,7 +58,7 @@ $ValidatePackage = {
$TargetFile = Join-Path -Path $ExtractPath -ChildPath $FakeName
# We ignore resource DLLs
- if ($FileName.EndsWith(".resources.dll")) {
+ if ($FileName.EndsWith('.resources.dll')) {
return
}
@@ -96,7 +96,7 @@ $ValidatePackage = {
$Uri = $Link -as [System.URI]
# Only GitHub links are valid
- if ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match "github" -or $Uri.Host -match "githubusercontent")) {
+ if ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) {
$Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
}
else {
@@ -143,19 +143,35 @@ $ValidatePackage = {
}
if ($FailedFiles -eq 0) {
- Write-Host "Passed."
- return 0
+ Write-Host 'Passed.'
+ return [pscustomobject]@{
+ result = 0
+ packagePath = $PackagePath
+ }
}
else {
- Write-Host "$PackagePath has broken SourceLink links."
- return 1
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "$PackagePath has broken SourceLink links."
+ return [pscustomobject]@{
+ result = 1
+ packagePath = $PackagePath
+ }
+ }
+}
+
+function CheckJobResult(
+ $result,
+ $packagePath,
+ [ref]$ValidationFailures) {
+  if ($result -ne '0') {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "$packagePath has broken SourceLink links."
+ $ValidationFailures.Value++
}
}
function ValidateSourceLinkLinks {
- if ($GHRepoName -ne "" -and !($GHRepoName -Match "^[^\s\/]+/[^\s\/]+$")) {
- if (!($GHRepoName -Match "^[^\s-]+-[^\s]+$")) {
- Write-PipelineTaskError "GHRepoName should be in the format / or -. '$GHRepoName'"
+ if ($GHRepoName -ne '' -and !($GHRepoName -Match '^[^\s\/]+/[^\s\/]+$')) {
+ if (!($GHRepoName -Match '^[^\s-]+-[^\s]+$')) {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHRepoName should be in the format / or -. '$GHRepoName'"
ExitWithExitCode 1
}
else {
@@ -163,14 +179,14 @@ function ValidateSourceLinkLinks {
}
}
- if ($GHCommit -ne "" -and !($GHCommit -Match "^[0-9a-fA-F]{40}$")) {
- Write-PipelineTaskError "GHCommit should be a 40 chars hexadecimal string. '$GHCommit'"
+ if ($GHCommit -ne '' -and !($GHCommit -Match '^[0-9a-fA-F]{40}$')) {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHCommit should be a 40 chars hexadecimal string. '$GHCommit'"
ExitWithExitCode 1
}
- if ($GHRepoName -ne "" -and $GHCommit -ne "") {
- $RepoTreeURL = -Join("http://api.github.com/repos/", $GHRepoName, "/git/trees/", $GHCommit, "?recursive=1")
- $CodeExtensions = @(".cs", ".vb", ".fs", ".fsi", ".fsx", ".fsscript")
+ if ($GHRepoName -ne '' -and $GHCommit -ne '') {
+ $RepoTreeURL = -Join('http://api.github.com/repos/', $GHRepoName, '/git/trees/', $GHCommit, '?recursive=1')
+ $CodeExtensions = @('.cs', '.vb', '.fs', '.fsi', '.fsx', '.fsscript')
try {
# Retrieve the list of files in the repo at that particular commit point and store them in the RepoFiles hash
@@ -188,14 +204,16 @@ function ValidateSourceLinkLinks {
Write-Host "Problems downloading the list of files from the repo. Url used: $RepoTreeURL . Execution will proceed without caching."
}
}
- elseif ($GHRepoName -ne "" -or $GHCommit -ne "") {
- Write-Host "For using the http caching mechanism both GHRepoName and GHCommit should be informed."
+ elseif ($GHRepoName -ne '' -or $GHCommit -ne '') {
+    Write-Host 'To use the http caching mechanism, both GHRepoName and GHCommit must be provided.'
}
if (Test-Path $ExtractPath) {
Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
}
+ $ValidationFailures = 0
+
# Process each NuGet package in parallel
Get-ChildItem "$InputPath\*.symbols.nupkg" |
ForEach-Object {
@@ -209,26 +227,27 @@ function ValidateSourceLinkLinks {
}
foreach ($Job in @(Get-Job -State 'Completed')) {
- Receive-Job -Id $Job.Id
+ $jobResult = Wait-Job -Id $Job.Id | Receive-Job
+ CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures)
Remove-Job -Id $Job.Id
}
}
- $ValidationFailures = 0
foreach ($Job in @(Get-Job)) {
$jobResult = Wait-Job -Id $Job.Id | Receive-Job
- if ($jobResult -ne "0") {
+ if ($jobResult -ne '0') {
$ValidationFailures++
}
+ Remove-Job -Id $Job.Id
}
if ($ValidationFailures -gt 0) {
- Write-PipelineTaskError " $ValidationFailures package(s) failed validation."
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "$ValidationFailures package(s) failed validation."
ExitWithExitCode 1
}
}
function InstallSourcelinkCli {
- $sourcelinkCliPackageName = "sourcelink"
+ $sourcelinkCliPackageName = 'sourcelink'
$dotnetRoot = InitializeDotNetCli -install:$true
$dotnet = "$dotnetRoot\dotnet.exe"
@@ -239,7 +258,7 @@ function InstallSourcelinkCli {
}
else {
Write-Host "Installing SourceLink CLI version $sourcelinkCliVersion..."
- Write-Host "You may need to restart your command window if this is the first dotnet tool you have installed."
+ Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
& "$dotnet" tool install $sourcelinkCliPackageName --version $sourcelinkCliVersion --verbosity "minimal" --global
}
}
@@ -250,8 +269,8 @@ try {
ValidateSourceLinkLinks
}
catch {
- Write-Host $_
Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message $_
ExitWithExitCode 1
}
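
The `$ValidatePackage` script block now returns a `[pscustomobject]` carrying both a result code and the package path, so the job-draining loops can name the package that failed. The same pattern in isolation, with a placeholder standing in for the real SourceLink check:

$CheckPackage = {
  param([string] $PackagePath)
  $broken = 0   # placeholder for the real validation result
  return [pscustomobject]@{ result = $broken; packagePath = $PackagePath }
}

$failures = 0
foreach ($pkg in 'a.symbols.nupkg', 'b.symbols.nupkg') {
  Start-Job -ScriptBlock $CheckPackage -ArgumentList $pkg | Out-Null
}

foreach ($job in @(Get-Job)) {
  $jobResult = Wait-Job -Id $job.Id | Receive-Job
  if ($jobResult.result -ne 0) {
    Write-Host "$($jobResult.packagePath) failed validation."
    $failures++
  }
  Remove-Job -Id $job.Id
}
Write-Host "$failures package(s) failed."
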
diff --git a/eng/common/post-build/symbols-validation.ps1 b/eng/common/post-build/symbols-validation.ps1
index 096ac321d12..fcc6019b495 100644
--- a/eng/common/post-build/symbols-validation.ps1
+++ b/eng/common/post-build/symbols-validation.ps1
@@ -1,127 +1,168 @@
param(
[Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored
[Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
- [Parameter(Mandatory=$true)][string] $DotnetSymbolVersion # Version of dotnet symbol to use
+ [Parameter(Mandatory=$true)][string] $DotnetSymbolVersion, # Version of dotnet symbol to use
+ [Parameter(Mandatory=$false)][switch] $ContinueOnError, # If we should keep checking symbols after an error
+ [Parameter(Mandatory=$false)][switch] $Clean # Clean extracted symbols directory after checking symbols
)
-. $PSScriptRoot\post-build-utils.ps1
+# Maximum number of jobs to run in parallel
+$MaxParallelJobs = 6
-Add-Type -AssemblyName System.IO.Compression.FileSystem
+# Wait time between checks for system load
+$SecondsBetweenLoadChecks = 10
-function FirstMatchingSymbolDescriptionOrDefault {
+$CountMissingSymbols = {
param(
- [string] $FullPath, # Full path to the module that has to be checked
- [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
- [string] $SymbolsPath
+ [string] $PackagePath # Path to a NuGet package
)
- $FileName = [System.IO.Path]::GetFileName($FullPath)
- $Extension = [System.IO.Path]::GetExtension($FullPath)
-
- # Those below are potential symbol files that the `dotnet symbol` might
- # return. Which one will be returned depend on the type of file we are
- # checking and which type of file was uploaded.
-
- # The file itself is returned
- $SymbolPath = $SymbolsPath + "\" + $FileName
-
- # PDB file for the module
- $PdbPath = $SymbolPath.Replace($Extension, ".pdb")
-
- # PDB file for R2R module (created by crossgen)
- $NGenPdb = $SymbolPath.Replace($Extension, ".ni.pdb")
+ . $using:PSScriptRoot\..\tools.ps1
- # DBG file for a .so library
- $SODbg = $SymbolPath.Replace($Extension, ".so.dbg")
-
- # DWARF file for a .dylib
- $DylibDwarf = $SymbolPath.Replace($Extension, ".dylib.dwarf")
-
- $dotnetSymbolExe = "$env:USERPROFILE\.dotnet\tools"
- $dotnetSymbolExe = Resolve-Path "$dotnetSymbolExe\dotnet-symbol.exe"
-
- & $dotnetSymbolExe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath | Out-Null
-
- if (Test-Path $PdbPath) {
- return "PDB"
- }
- elseif (Test-Path $NGenPdb) {
- return "NGen PDB"
- }
- elseif (Test-Path $SODbg) {
- return "DBG for SO"
- }
- elseif (Test-Path $DylibDwarf) {
- return "Dwarf for Dylib"
- }
- elseif (Test-Path $SymbolPath) {
- return "Module"
- }
- else {
- return $null
- }
-}
-
-function CountMissingSymbols {
- param(
- [string] $PackagePath # Path to a NuGet package
- )
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
  # Ensure input file exists
if (!(Test-Path $PackagePath)) {
Write-PipelineTaskError "Input file does not exist: $PackagePath"
- ExitWithExitCode 1
+ return -2
}
# Extensions for which we'll look for symbols
- $RelevantExtensions = @(".dll", ".exe", ".so", ".dylib")
+ $RelevantExtensions = @('.dll', '.exe', '.so', '.dylib')
# How many files are missing symbol information
$MissingSymbols = 0
$PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
$PackageGuid = New-Guid
- $ExtractPath = Join-Path -Path $ExtractPath -ChildPath $PackageGuid
- $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath "Symbols"
+ $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageGuid
+ $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath 'Symbols'
- [System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath)
+ try {
+ [System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath)
+ }
+ catch {
+ Write-Host "Something went wrong extracting $PackagePath"
+ Write-Host $_
+ return [pscustomobject]@{
+ result = -1
+ packagePath = $PackagePath
+ }
+ }
Get-ChildItem -Recurse $ExtractPath |
Where-Object {$RelevantExtensions -contains $_.Extension} |
ForEach-Object {
- if ($_.FullName -Match "\\ref\\") {
- Write-Host "`t Ignoring reference assembly file" $_.FullName
+ $FileName = $_.FullName
+ if ($FileName -Match '\\ref\\') {
+ Write-Host "`t Ignoring reference assembly file " $FileName
return
}
- $SymbolsOnMSDL = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--microsoft-symbol-server" $SymbolsPath
- $SymbolsOnSymWeb = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--internal-server" $SymbolsPath
+ $FirstMatchingSymbolDescriptionOrDefault = {
+ param(
+ [string] $FullPath, # Full path to the module that has to be checked
+ [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
+ [string] $SymbolsPath
+ )
+
+ $FileName = [System.IO.Path]::GetFileName($FullPath)
+ $Extension = [System.IO.Path]::GetExtension($FullPath)
+
+ # Those below are potential symbol files that the `dotnet symbol` might
+        # return. Which one will be returned depends on the type of file we are
+ # checking and which type of file was uploaded.
+
+ # The file itself is returned
+ $SymbolPath = $SymbolsPath + '\' + $FileName
- Write-Host -NoNewLine "`t Checking file" $_.FullName "... "
+ # PDB file for the module
+ $PdbPath = $SymbolPath.Replace($Extension, '.pdb')
+
+ # PDB file for R2R module (created by crossgen)
+ $NGenPdb = $SymbolPath.Replace($Extension, '.ni.pdb')
+
+ # DBG file for a .so library
+ $SODbg = $SymbolPath.Replace($Extension, '.so.dbg')
+
+ # DWARF file for a .dylib
+ $DylibDwarf = $SymbolPath.Replace($Extension, '.dylib.dwarf')
+
+ $dotnetSymbolExe = "$env:USERPROFILE\.dotnet\tools"
+ $dotnetSymbolExe = Resolve-Path "$dotnetSymbolExe\dotnet-symbol.exe"
+
+ & $dotnetSymbolExe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath | Out-Null
+
+ if (Test-Path $PdbPath) {
+ return 'PDB'
+ }
+ elseif (Test-Path $NGenPdb) {
+ return 'NGen PDB'
+ }
+ elseif (Test-Path $SODbg) {
+ return 'DBG for SO'
+ }
+ elseif (Test-Path $DylibDwarf) {
+ return 'Dwarf for Dylib'
+ }
+ elseif (Test-Path $SymbolPath) {
+ return 'Module'
+ }
+ else {
+ return $null
+ }
+ }
+
+ $SymbolsOnMSDL = & $FirstMatchingSymbolDescriptionOrDefault $FileName '--microsoft-symbol-server' $SymbolsPath
+ $SymbolsOnSymWeb = & $FirstMatchingSymbolDescriptionOrDefault $FileName '--internal-server' $SymbolsPath
+
+ Write-Host -NoNewLine "`t Checking file " $FileName "... "
if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) {
- Write-Host "Symbols found on MSDL (" $SymbolsOnMSDL ") and SymWeb (" $SymbolsOnSymWeb ")"
+ Write-Host "Symbols found on MSDL ($SymbolsOnMSDL) and SymWeb ($SymbolsOnSymWeb)"
}
else {
$MissingSymbols++
if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) {
- Write-Host "No symbols found on MSDL or SymWeb!"
+ Write-Host 'No symbols found on MSDL or SymWeb!'
}
else {
if ($SymbolsOnMSDL -eq $null) {
- Write-Host "No symbols found on MSDL!"
+ Write-Host 'No symbols found on MSDL!'
}
else {
- Write-Host "No symbols found on SymWeb!"
+ Write-Host 'No symbols found on SymWeb!'
}
}
}
}
+ if ($using:Clean) {
+ Remove-Item $ExtractPath -Recurse -Force
+ }
+
Pop-Location
- return $MissingSymbols
+ return [pscustomobject]@{
+ result = $MissingSymbols
+ packagePath = $PackagePath
+ }
+}
+
+function CheckJobResult(
+ $result,
+ $packagePath,
+ [ref]$DupedSymbols,
+ [ref]$TotalFailures) {
+ if ($result -eq '-1') {
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath has duplicated symbol files"
+ $DupedSymbols.Value++
+ }
+  elseif ($result -ne '0') {
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Missing symbols for $result modules in the package $packagePath"
+ $TotalFailures.Value++
+ }
}
function CheckSymbolsAvailable {
@@ -129,38 +170,71 @@ function CheckSymbolsAvailable {
Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
}
+ $TotalFailures = 0
+ $DupedSymbols = 0
+
Get-ChildItem "$InputPath\*.nupkg" |
ForEach-Object {
$FileName = $_.Name
-
+ $FullName = $_.FullName
+
# These packages from Arcade-Services include some native libraries that
# our current symbol uploader can't handle. Below is a workaround until
# we get issue: https://github.com/dotnet/arcade/issues/2457 sorted.
- if ($FileName -Match "Microsoft\.DotNet\.Darc\.") {
+ if ($FileName -Match 'Microsoft\.DotNet\.Darc\.') {
Write-Host "Ignoring Arcade-services file: $FileName"
Write-Host
return
}
- elseif ($FileName -Match "Microsoft\.DotNet\.Maestro\.Tasks\.") {
+ elseif ($FileName -Match 'Microsoft\.DotNet\.Maestro\.Tasks\.') {
Write-Host "Ignoring Arcade-services file: $FileName"
Write-Host
return
}
-
+
Write-Host "Validating $FileName "
- $Status = CountMissingSymbols "$InputPath\$FileName"
-
- if ($Status -ne 0) {
- Write-PipelineTaskError "Missing symbols for $Status modules in the package $FileName"
- ExitWithExitCode $exitCode
+
+ Start-Job -ScriptBlock $CountMissingSymbols -ArgumentList $FullName | Out-Null
+
+ $NumJobs = @(Get-Job -State 'Running').Count
+
+ while ($NumJobs -ge $MaxParallelJobs) {
+ Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again."
+ sleep $SecondsBetweenLoadChecks
+ $NumJobs = @(Get-Job -State 'Running').Count
}
+ foreach ($Job in @(Get-Job -State 'Completed')) {
+ $jobResult = Wait-Job -Id $Job.Id | Receive-Job
+ CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures)
+ Remove-Job -Id $Job.Id
+ }
Write-Host
}
+
+ foreach ($Job in @(Get-Job)) {
+ $jobResult = Wait-Job -Id $Job.Id | Receive-Job
+ CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures)
+ }
+
+ if ($TotalFailures -gt 0 -or $DupedSymbols -gt 0) {
+ if ($TotalFailures -gt 0) {
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Symbols missing for $TotalFailures packages"
+ }
+
+ if ($DupedSymbols -gt 0) {
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$DupedSymbols packages had duplicated symbol files"
+ }
+
+ ExitWithExitCode 1
+ }
+ else {
+ Write-Host "All symbols validated!"
+ }
}
function InstallDotnetSymbol {
- $dotnetSymbolPackageName = "dotnet-symbol"
+ $dotnetSymbolPackageName = 'dotnet-symbol'
$dotnetRoot = InitializeDotNetCli -install:$true
$dotnet = "$dotnetRoot\dotnet.exe"
@@ -171,19 +245,24 @@ function InstallDotnetSymbol {
}
else {
Write-Host "Installing dotnet-symbol version $dotnetSymbolVersion..."
- Write-Host "You may need to restart your command window if this is the first dotnet tool you have installed."
+ Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
& "$dotnet" tool install $dotnetSymbolPackageName --version $dotnetSymbolVersion --verbosity "minimal" --global
}
}
try {
+ . $PSScriptRoot\post-build-utils.ps1
+
InstallDotnetSymbol
+ foreach ($Job in @(Get-Job)) {
+ Remove-Job -Id $Job.Id
+ }
+
CheckSymbolsAvailable
}
catch {
- Write-Host $_
- Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message $_
ExitWithExitCode 1
}
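
Symbol checking now fans packages out to background jobs, capping concurrency at `$MaxParallelJobs`, polling every `$SecondsBetweenLoadChecks` seconds, and draining completed jobs incrementally. The throttling loop reduced to its essentials, with a short sleep standing in for the per-package work:

$MaxParallelJobs = 6
$SecondsBetweenLoadChecks = 10

foreach ($item in 1..20) {
  Start-Job -ScriptBlock { param($i) Start-Sleep -Seconds 5; $i } -ArgumentList $item | Out-Null

  # Throttle: wait while the number of running jobs is at the cap.
  while (@(Get-Job -State 'Running').Count -ge $MaxParallelJobs) {
    Start-Sleep -Seconds $SecondsBetweenLoadChecks
  }

  # Drain whatever has finished so results are processed incrementally.
  foreach ($job in @(Get-Job -State 'Completed')) {
    Receive-Job -Id $job.Id | Out-Null
    Remove-Job -Id $job.Id
  }
}

# Collect the stragglers.
foreach ($job in @(Get-Job)) {
  Wait-Job -Id $job.Id | Receive-Job | Out-Null
  Remove-Job -Id $job.Id
}
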
diff --git a/eng/common/post-build/trigger-subscriptions.ps1 b/eng/common/post-build/trigger-subscriptions.ps1
index 926d5b45513..55dea518ac5 100644
--- a/eng/common/post-build/trigger-subscriptions.ps1
+++ b/eng/common/post-build/trigger-subscriptions.ps1
@@ -2,56 +2,63 @@ param(
[Parameter(Mandatory=$true)][string] $SourceRepo,
[Parameter(Mandatory=$true)][int] $ChannelId,
[Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
- [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = "https://maestro-prod.westus2.cloudapp.azure.com",
- [Parameter(Mandatory=$false)][string] $MaestroApiVersion = "2019-01-16"
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
+ [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
)
-. $PSScriptRoot\post-build-utils.ps1
+try {
+ . $PSScriptRoot\post-build-utils.ps1
-# Get all the $SourceRepo subscriptions
-$normalizedSourceRepo = $SourceRepo.Replace('dnceng@', '')
-$subscriptions = Get-MaestroSubscriptions -SourceRepository $normalizedSourceRepo -ChannelId $ChannelId
+ # Get all the $SourceRepo subscriptions
+ $normalizedSourceRepo = $SourceRepo.Replace('dnceng@', '')
+ $subscriptions = Get-MaestroSubscriptions -SourceRepository $normalizedSourceRepo -ChannelId $ChannelId
-if (!$subscriptions) {
- Write-Host "No subscriptions found for source repo '$normalizedSourceRepo' in channel '$ChannelId'"
- ExitWithExitCode 0
-}
+ if (!$subscriptions) {
+ Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message "No subscriptions found for source repo '$normalizedSourceRepo' in channel '$ChannelId'"
+ ExitWithExitCode 0
+ }
-$subscriptionsToTrigger = New-Object System.Collections.Generic.List[string]
-$failedTriggeredSubscription = $false
+ $subscriptionsToTrigger = New-Object System.Collections.Generic.List[string]
+ $failedTriggeredSubscription = $false
-# Get all enabled subscriptions that need dependency flow on 'everyBuild'
-foreach ($subscription in $subscriptions) {
- if ($subscription.enabled -and $subscription.policy.updateFrequency -like 'everyBuild' -and $subscription.channel.id -eq $ChannelId) {
- Write-Host "Should trigger this subscription: $subscription.id"
- [void]$subscriptionsToTrigger.Add($subscription.id)
+ # Get all enabled subscriptions that need dependency flow on 'everyBuild'
+ foreach ($subscription in $subscriptions) {
+ if ($subscription.enabled -and $subscription.policy.updateFrequency -like 'everyBuild' -and $subscription.channel.id -eq $ChannelId) {
+ Write-Host "Should trigger this subscription: ${$subscription.id}"
+ [void]$subscriptionsToTrigger.Add($subscription.id)
+ }
}
-}
-foreach ($subscriptionToTrigger in $subscriptionsToTrigger) {
- try {
- Write-Host "Triggering subscription '$subscriptionToTrigger'."
-
- Trigger-Subscription -SubscriptionId $subscriptionToTrigger
-
- Write-Host "done."
- }
- catch
- {
- Write-Host "There was an error while triggering subscription '$subscriptionToTrigger'"
- Write-Host $_
- Write-Host $_.ScriptStackTrace
- $failedTriggeredSubscription = $true
+ foreach ($subscriptionToTrigger in $subscriptionsToTrigger) {
+ try {
+ Write-Host "Triggering subscription '$subscriptionToTrigger'."
+
+ Trigger-Subscription -SubscriptionId $subscriptionToTrigger
+
+ Write-Host 'done.'
+ }
+ catch
+ {
+ Write-Host "There was an error while triggering subscription '$subscriptionToTrigger'"
+ Write-Host $_
+ Write-Host $_.ScriptStackTrace
+ $failedTriggeredSubscription = $true
+ }
}
-}
-if ($subscriptionsToTrigger.Count -eq 0) {
- Write-Host "No subscription matched source repo '$normalizedSourceRepo' and channel ID '$ChannelId'."
+ if ($subscriptionsToTrigger.Count -eq 0) {
+ Write-Host "No subscription matched source repo '$normalizedSourceRepo' and channel ID '$ChannelId'."
+ }
+ elseif ($failedTriggeredSubscription) {
+ Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message 'At least one subscription failed to be triggered...'
+ ExitWithExitCode 1
+ }
+ else {
+ Write-Host 'All subscriptions were triggered successfully!'
+ }
}
-elseif ($failedTriggeredSubscription) {
- Write-Host "At least one subscription failed to be triggered..."
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message $_
ExitWithExitCode 1
}
-else {
- Write-Host "All subscriptions were triggered successfully!"
-}
diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1
index d0eec5163ef..f55c43c6f47 100644
--- a/eng/common/sdk-task.ps1
+++ b/eng/common/sdk-task.ps1
@@ -1,8 +1,8 @@
[CmdletBinding(PositionalBinding=$false)]
Param(
- [string] $configuration = "Debug",
+ [string] $configuration = 'Debug',
[string] $task,
- [string] $verbosity = "minimal",
+ [string] $verbosity = 'minimal',
[string] $msbuildEngine = $null,
[switch] $restore,
[switch] $prepareMachine,
@@ -32,7 +32,7 @@ function Print-Usage() {
}
function Build([string]$target) {
- $logSuffix = if ($target -eq "Execute") { "" } else { ".$target" }
+ $logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" }
$log = Join-Path $LogDir "$task$logSuffix.binlog"
$outputPath = Join-Path $ToolsetDir "$task\\"
@@ -42,37 +42,55 @@ function Build([string]$target) {
/p:Configuration=$configuration `
/p:RepoRoot=$RepoRoot `
/p:BaseIntermediateOutputPath=$outputPath `
+ /v:$verbosity `
@properties
}
try {
- if ($help -or (($null -ne $properties) -and ($properties.Contains("/help") -or $properties.Contains("/?")))) {
+ if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) {
Print-Usage
exit 0
}
if ($task -eq "") {
- Write-Host "Missing required parameter '-task '" -ForegroundColor Red
+ Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task '" -ForegroundColor Red
Print-Usage
ExitWithExitCode 1
}
+ if( $msbuildEngine -eq "vs") {
+ # Ensure desktop MSBuild is available for sdk tasks.
+ if( -not ($GlobalJson.tools.PSObject.Properties.Name -contains "vs" )) {
+ $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
+ }
+ if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
+ $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "16.8.0-preview3" -MemberType NoteProperty
+ }
+ if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
+ $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
+ }
+ if ($xcopyMSBuildToolsFolder -eq $null) {
+ throw 'Unable to get xcopy downloadable version of msbuild'
+ }
+
+ $global:_MSBuildExe = "$($xcopyMSBuildToolsFolder)\MSBuild\Current\Bin\MSBuild.exe"
+ }
+
$taskProject = GetSdkTaskProject $task
if (!(Test-Path $taskProject)) {
- Write-Host "Unknown task: $task" -ForegroundColor Red
+ Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task" -ForegroundColor Red
ExitWithExitCode 1
}
if ($restore) {
- Build "Restore"
+ Build 'Restore'
}
- Build "Execute"
+ Build 'Execute'
}
catch {
- Write-Host $_
- Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Build' -Message $_
ExitWithExitCode 1
}
diff --git a/eng/common/sdl/execute-all-sdl-tools.ps1 b/eng/common/sdl/execute-all-sdl-tools.ps1
index 01799d63ff3..b681d797cda 100644
--- a/eng/common/sdl/execute-all-sdl-tools.ps1
+++ b/eng/common/sdl/execute-all-sdl-tools.ps1
@@ -1,100 +1,120 @@
Param(
- [string] $GuardianPackageName, # Required: the name of guardian CLI package (not needed if GuardianCliLocation is specified)
- [string] $NugetPackageDirectory, # Required: directory where NuGet packages are installed (not needed if GuardianCliLocation is specified)
- [string] $GuardianCliLocation, # Optional: Direct location of Guardian CLI executable if GuardianPackageName & NugetPackageDirectory are not specified
- [string] $Repository=$env:BUILD_REPOSITORY_NAME, # Required: the name of the repository (e.g. dotnet/arcade)
- [string] $BranchName=$env:BUILD_SOURCEBRANCH, # Optional: name of branch or version of gdn settings; defaults to master
- [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located
- [string] $ArtifactsDirectory = (Join-Path $env:BUILD_SOURCESDIRECTORY ("artifacts")), # Required: the directory where build artifacts are located
- [string] $AzureDevOpsAccessToken, # Required: access token for dnceng; should be provided via KeyVault
- [string[]] $SourceToolsList, # Optional: list of SDL tools to run on source code
- [string[]] $ArtifactToolsList, # Optional: list of SDL tools to run on built artifacts
- [bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs.
- [string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs.
- [string] $BuildNumber=$env:BUILD_BUILDNUMBER, # Optional: required for TSA publish; defaults to $(Build.BuildNumber)
- [bool] $UpdateBaseline=$False, # Optional: if true, will update the baseline in the repository; should only be run after fixing any issues which need to be fixed
- [bool] $TsaOnboard=$False, # Optional: if true, will onboard the repository to TSA; should only be run once; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaInstanceUrl, # Optional: only needed if TsaOnboard or TsaPublish is true; the instance-url registered with TSA; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaCodebaseName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the codebase registered with TSA; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaProjectName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the project registered with TSA; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaNotificationEmail, # Optional: only needed if TsaOnboard is true; the email(s) which will receive notifications of TSA bug filings (e.g. alias@microsoft.com); TSA is the automated framework used to upload test results as bugs.
- [string] $TsaCodebaseAdmin, # Optional: only needed if TsaOnboard is true; the aliases which are admins of the TSA codebase (e.g. DOMAIN\alias); TSA is the automated framework used to upload test results as bugs.
- [string] $TsaBugAreaPath, # Optional: only needed if TsaOnboard is true; the area path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaIterationPath, # Optional: only needed if TsaOnboard is true; the iteration path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
- [string] $GuardianLoggerLevel="Standard", # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error
- [string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1")
- [string[]] $PoliCheckAdditionalRunConfigParams # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1")
+ [string] $GuardianPackageName, # Required: the name of guardian CLI package (not needed if GuardianCliLocation is specified)
+ [string] $NugetPackageDirectory, # Required: directory where NuGet packages are installed (not needed if GuardianCliLocation is specified)
+ [string] $GuardianCliLocation, # Optional: Direct location of Guardian CLI executable if GuardianPackageName & NugetPackageDirectory are not specified
+ [string] $Repository=$env:BUILD_REPOSITORY_NAME, # Required: the name of the repository (e.g. dotnet/arcade)
+ [string] $BranchName=$env:BUILD_SOURCEBRANCH, # Optional: name of branch or version of gdn settings; defaults to master
+ [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located
+ [string] $ArtifactsDirectory = (Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY ('artifacts')), # Required: the directory where build artifacts are located
+ [string] $AzureDevOpsAccessToken, # Required: access token for dnceng; should be provided via KeyVault
+ [string[]] $SourceToolsList, # Optional: list of SDL tools to run on source code
+ [string[]] $ArtifactToolsList, # Optional: list of SDL tools to run on built artifacts
+ [bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs.
+ [string] $BuildNumber=$env:BUILD_BUILDNUMBER, # Optional: required for TSA publish; defaults to $(Build.BuildNumber)
+ [bool] $UpdateBaseline=$False, # Optional: if true, will update the baseline in the repository; should only be run after fixing any issues which need to be fixed
+ [bool] $TsaOnboard=$False, # Optional: if true, will onboard the repository to TSA; should only be run once; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaInstanceUrl, # Optional: only needed if TsaOnboard or TsaPublish is true; the instance-url registered with TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaCodebaseName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the codebase registered with TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaProjectName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the project registered with TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaNotificationEmail, # Optional: only needed if TsaOnboard is true; the email(s) which will receive notifications of TSA bug filings (e.g. alias@microsoft.com); TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaCodebaseAdmin, # Optional: only needed if TsaOnboard is true; the aliases which are admins of the TSA codebase (e.g. DOMAIN\alias); TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaBugAreaPath, # Optional: only needed if TsaOnboard is true; the area path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaIterationPath, # Optional: only needed if TsaOnboard is true; the iteration path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
+ [string] $GuardianLoggerLevel='Standard', # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error
+ [string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1")
+ [string[]] $PoliCheckAdditionalRunConfigParams, # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1")
+ [bool] $BreakOnFailure=$False # Optional: Fail the build if there were errors during the run
)
-$ErrorActionPreference = "Stop"
-Set-StrictMode -Version 2.0
-$LASTEXITCODE = 0
+try {
+ $ErrorActionPreference = 'Stop'
+ Set-StrictMode -Version 2.0
+ $disableConfigureToolsetImport = $true
+ $LASTEXITCODE = 0
-#Replace repo names to the format of org/repo
-if (!($Repository.contains('/'))) {
- $RepoName = $Repository -replace '(.*?)-(.*)', '$1/$2';
-}
-else{
- $RepoName = $Repository;
-}
+ # `tools.ps1` checks $ci to perform some actions. Since the SDL
+ # scripts don't necessarily execute in the same agent that run the
+ # build.ps1/sh script this variable isn't automatically set.
+ $ci = $true
+ . $PSScriptRoot\..\tools.ps1
-if ($GuardianPackageName) {
- $guardianCliLocation = Join-Path $NugetPackageDirectory (Join-Path $GuardianPackageName (Join-Path "tools" "guardian.cmd"))
-} else {
- $guardianCliLocation = $GuardianCliLocation
-}
+ #Replace repo names to the format of org/repo
+ if (!($Repository.contains('/'))) {
+ $RepoName = $Repository -replace '(.*?)-(.*)', '$1/$2';
+ }
+ else{
+ $RepoName = $Repository;
+ }
-$workingDirectory = (Split-Path $SourceDirectory -Parent)
-$ValidPath = Test-Path $guardianCliLocation
+ if ($GuardianPackageName) {
+ $guardianCliLocation = Join-Path $NugetPackageDirectory (Join-Path $GuardianPackageName (Join-Path 'tools' 'guardian.cmd'))
+ } else {
+ $guardianCliLocation = $GuardianCliLocation
+ }
-if ($ValidPath -eq $False)
-{
- Write-Host "Invalid Guardian CLI Location."
- exit 1
-}
+ $workingDirectory = (Split-Path $SourceDirectory -Parent)
+ $ValidPath = Test-Path $guardianCliLocation
+
+ if ($ValidPath -eq $False)
+ {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Invalid Guardian CLI Location.'
+ ExitWithExitCode 1
+ }
-& $(Join-Path $PSScriptRoot "init-sdl.ps1") -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
-$gdnFolder = Join-Path $workingDirectory ".gdn"
+ & $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
+ $gdnFolder = Join-Path $workingDirectory '.gdn'
-if ($TsaOnboard) {
- if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) {
- Write-Host "$guardianCliLocation tsa-onboard --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
- & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
- if ($LASTEXITCODE -ne 0) {
- Write-Host "Guardian tsa-onboard failed with exit code $LASTEXITCODE."
- exit $LASTEXITCODE
+ if ($TsaOnboard) {
+ if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) {
+ Write-Host "$guardianCliLocation tsa-onboard --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
+ & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-onboard failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ } else {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not onboard to TSA -- not all required values ($TsaCodebaseName, $TsaNotificationEmail, $TsaCodebaseAdmin, $TsaBugAreaPath) were specified.'
+ ExitWithExitCode 1
}
- } else {
- Write-Host "Could not onboard to TSA -- not all required values ($$TsaCodebaseName, $$TsaNotificationEmail, $$TsaCodebaseAdmin, $$TsaBugAreaPath) were specified."
- exit 1
}
-}
-if ($ArtifactToolsList -and $ArtifactToolsList.Count -gt 0) {
- & $(Join-Path $PSScriptRoot "run-sdl.ps1") -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $ArtifactsDirectory -GdnFolder $gdnFolder -ToolsList $ArtifactToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
-}
-if ($SourceToolsList -and $SourceToolsList.Count -gt 0) {
- & $(Join-Path $PSScriptRoot "run-sdl.ps1") -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $SourceDirectory -GdnFolder $gdnFolder -ToolsList $SourceToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
-}
+ if ($ArtifactToolsList -and $ArtifactToolsList.Count -gt 0) {
+ & $(Join-Path $PSScriptRoot 'run-sdl.ps1') -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $ArtifactsDirectory -GdnFolder $gdnFolder -ToolsList $ArtifactToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
+ }
+ if ($SourceToolsList -and $SourceToolsList.Count -gt 0) {
+ & $(Join-Path $PSScriptRoot 'run-sdl.ps1') -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $SourceDirectory -GdnFolder $gdnFolder -ToolsList $SourceToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
+ }
-if ($UpdateBaseline) {
- & (Join-Path $PSScriptRoot "push-gdn.ps1") -Repository $RepoName -BranchName $BranchName -GdnFolder $GdnFolder -AzureDevOpsAccessToken $AzureDevOpsAccessToken -PushReason "Update baseline"
-}
+ if ($UpdateBaseline) {
+ & (Join-Path $PSScriptRoot 'push-gdn.ps1') -Repository $RepoName -BranchName $BranchName -GdnFolder $GdnFolder -AzureDevOpsAccessToken $AzureDevOpsAccessToken -PushReason 'Update baseline'
+ }
-if ($TsaPublish) {
- if ($TsaBranchName -and $BuildNumber) {
- if (-not $TsaRepositoryName) {
- $TsaRepositoryName = "$($Repository)-$($BranchName)"
+ if ($TsaPublish) {
+ if ($TsaBranchName -and $BuildNumber) {
+ if (-not $TsaRepositoryName) {
+ $TsaRepositoryName = "$($Repository)-$($BranchName)"
+ }
+ Write-Host "$guardianCliLocation tsa-publish --all-tools --repository-name `"$TsaRepositoryName`" --branch-name `"$TsaBranchName`" --build-number `"$BuildNumber`" --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
+ & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-publish failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ } else {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not publish to TSA -- not all required values ($TsaBranchName, $BuildNumber) were specified.'
+ ExitWithExitCode 1
}
- Write-Host "$guardianCliLocation tsa-publish --all-tools --repository-name `"$TsaRepositoryName`" --branch-name `"$TsaBranchName`" --build-number `"$BuildNumber`" --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
- & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
- if ($LASTEXITCODE -ne 0) {
- Write-Host "Guardian tsa-publish failed with exit code $LASTEXITCODE."
- exit $LASTEXITCODE
- }
- } else {
- Write-Host "Could not publish to TSA -- not all required values ($$TsaBranchName, $$BuildNumber) were specified."
- exit 1
}
+
+ if ($BreakOnFailure) {
+ Write-Host "Failing the build in case of breaking results..."
+ & $guardianCliLocation break
+ }
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ exit 1
}
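Usage sketch (illustrative only, not part of this change): the driver script above normally runs as a single pipeline step. A minimal example, assuming the usual arcade path eng/common/sdl/execute-all-sdl-tools.ps1; $(GuardianPackageName) and the access token are placeholders, and the tool list reuses the credscan/policheck tools named elsewhere in this diff:

    # Hypothetical pipeline step; parameter values are placeholders.
    - powershell: eng/common/sdl/execute-all-sdl-tools.ps1
        -GuardianPackageName $(GuardianPackageName)
        -NugetPackageDirectory $(Build.SourcesDirectory)\.packages
        -Repository $(Build.Repository.Name)
        -BranchName $(Build.SourceBranchName)
        -SourceDirectory $(Build.SourcesDirectory)
        -ArtifactsDirectory $(Build.ArtifactStagingDirectory)\artifacts
        -AzureDevOpsAccessToken $(System.AccessToken)
        -SourceToolsList @("policheck","credscan")
        -GuardianLoggerLevel Standard
        -BreakOnFailure $true
      displayName: Run SDL tools (sketch)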
diff --git a/eng/common/sdl/extract-artifact-packages.ps1 b/eng/common/sdl/extract-artifact-packages.ps1
index 6e6825013bf..7f28d9c59ec 100644
--- a/eng/common/sdl/extract-artifact-packages.ps1
+++ b/eng/common/sdl/extract-artifact-packages.ps1
@@ -3,54 +3,12 @@ param(
[Parameter(Mandatory=$true)][string] $ExtractPath # Full path to directory where the packages will be extracted
)
-$ErrorActionPreference = "Stop"
+$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
-# `tools.ps1` checks $ci to perform some actions. Since the post-build
-# scripts don't necessarily execute in the same agent that run the
-# build.ps1/sh script this variable isn't automatically set.
-$ci = $true
-. $PSScriptRoot\..\tools.ps1
+$disableConfigureToolsetImport = $true
-$ExtractPackage = {
- param(
- [string] $PackagePath # Full path to a NuGet package
- )
-
- if (!(Test-Path $PackagePath)) {
- Write-PipelineTaskError "Input file does not exist: $PackagePath"
- ExitWithExitCode 1
- }
-
- $RelevantExtensions = @(".dll", ".exe", ".pdb")
- Write-Host -NoNewLine "Extracting" ([System.IO.Path]::GetFileName($PackagePath)) "... "
-
- $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
- $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
-
- Add-Type -AssemblyName System.IO.Compression.FileSystem
-
- [System.IO.Directory]::CreateDirectory($ExtractPath);
-
- try {
- $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
-
- $zip.Entries |
- Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
- ForEach-Object {
- $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.Name
-
- [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
- }
- }
- catch {
-
- }
- finally {
- $zip.Dispose()
- }
- }
- function ExtractArtifacts {
+function ExtractArtifacts {
if (!(Test-Path $InputPath)) {
Write-Host "Input Path does not exist: $InputPath"
ExitWithExitCode 0
@@ -67,11 +25,56 @@ $ExtractPackage = {
}
try {
+ # `tools.ps1` checks $ci to perform some actions. Since the SDL
+  # scripts don't necessarily execute in the same agent that runs the
+  # build.ps1/sh script, this variable isn't automatically set.
+ $ci = $true
+ . $PSScriptRoot\..\tools.ps1
+
+ $ExtractPackage = {
+ param(
+ [string] $PackagePath # Full path to a NuGet package
+ )
+
+ if (!(Test-Path $PackagePath)) {
+ Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath"
+ ExitWithExitCode 1
+ }
+
+ $RelevantExtensions = @('.dll', '.exe', '.pdb')
+ Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...'
+
+ $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
+ $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
+
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
+
+ [System.IO.Directory]::CreateDirectory($ExtractPath);
+
+ try {
+ $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
+
+ $zip.Entries |
+ Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
+ ForEach-Object {
+ $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.Name
+
+ [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
+ }
+ }
+ catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+ }
+ finally {
+ $zip.Dispose()
+ }
+ }
Measure-Command { ExtractArtifacts }
}
catch {
Write-Host $_
- Write-Host $_.Exception
- Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
ExitWithExitCode 1
}
diff --git a/eng/common/sdl/init-sdl.ps1 b/eng/common/sdl/init-sdl.ps1
index c737eb0e71c..a68bf0b88ea 100644
--- a/eng/common/sdl/init-sdl.ps1
+++ b/eng/common/sdl/init-sdl.ps1
@@ -1,31 +1,37 @@
Param(
[string] $GuardianCliLocation,
[string] $Repository,
- [string] $BranchName="master",
+ [string] $BranchName='master',
[string] $WorkingDirectory,
[string] $AzureDevOpsAccessToken,
- [string] $GuardianLoggerLevel="Standard"
+ [string] $GuardianLoggerLevel='Standard'
)
-$ErrorActionPreference = "Stop"
+$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
+$disableConfigureToolsetImport = $true
$LASTEXITCODE = 0
+# `tools.ps1` checks $ci to perform some actions. Since the SDL
+# scripts don't necessarily execute in the same agent that runs the
+# build.ps1/sh script, this variable isn't automatically set.
+$ci = $true
+. $PSScriptRoot\..\tools.ps1
+
# Don't display the console progress UI - it's a huge perf hit
$ProgressPreference = 'SilentlyContinue'
# Construct basic auth from AzDO access token; construct URI to the repository's gdn folder stored in that repository; construct location of zip file
$encodedPat = [Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(":$AzureDevOpsAccessToken"))
$escapedRepository = [Uri]::EscapeDataString("/$Repository/$BranchName/.gdn")
-$uri = "https://dev.azure.com/dnceng/internal/_apis/git/repositories/sdl-tool-cfg/Items?path=$escapedRepository&versionDescriptor[versionOptions]=0&`$format=zip&api-version=5.0-preview.1"
+$uri = "https://dev.azure.com/dnceng/internal/_apis/git/repositories/sdl-tool-cfg/Items?path=$escapedRepository&versionDescriptor[versionOptions]=0&`$format=zip&api-version=5.0"
$zipFile = "$WorkingDirectory/gdn.zip"
Add-Type -AssemblyName System.IO.Compression.FileSystem
-$gdnFolder = (Join-Path $WorkingDirectory ".gdn")
-Try
-{
+$gdnFolder = (Join-Path $WorkingDirectory '.gdn')
+try {
# We try to download the zip; if the request fails (e.g. the file doesn't exist), we catch it and init guardian instead
- Write-Host "Downloading gdn folder from internal config repostiory..."
+  Write-Host 'Downloading gdn folder from internal config repository...'
Invoke-WebRequest -Headers @{ "Accept"="application/zip"; "Authorization"="Basic $encodedPat" } -Uri $uri -OutFile $zipFile
if (Test-Path $gdnFolder) {
# Remove the gdn folder if it exists (it shouldn't unless there's too much caching; this is just in case)
@@ -33,19 +39,29 @@ Try
}
[System.IO.Compression.ZipFile]::ExtractToDirectory($zipFile, $WorkingDirectory)
Write-Host $gdnFolder
-} Catch [System.Net.WebException] {
+ ExitWithExitCode 0
+} catch [System.Net.WebException] { } # Catch and ignore webexception
+try {
# if the folder does not exist, we'll do a guardian init and push it to the remote repository
- Write-Host "Initializing Guardian..."
+ Write-Host 'Initializing Guardian...'
Write-Host "$GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel"
& $GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel
if ($LASTEXITCODE -ne 0) {
- Write-Error "Guardian init failed with exit code $LASTEXITCODE."
+ Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian init failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
}
# We create the mainbaseline so it can be edited later
Write-Host "$GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline"
& $GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline
if ($LASTEXITCODE -ne 0) {
- Write-Error "Guardian baseline failed with exit code $LASTEXITCODE."
+ Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian baseline failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
}
- & $(Join-Path $PSScriptRoot "push-gdn.ps1") -Repository $Repository -BranchName $BranchName -GdnFolder $gdnFolder -AzureDevOpsAccessToken $AzureDevOpsAccessToken -PushReason "Initialize gdn folder"
-}
\ No newline at end of file
+ & $(Join-Path $PSScriptRoot 'push-gdn.ps1') -Repository $Repository -BranchName $BranchName -GdnFolder $gdnFolder -AzureDevOpsAccessToken $AzureDevOpsAccessToken -PushReason 'Initialize gdn folder'
+ ExitWithExitCode 0
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/sdl/push-gdn.ps1 b/eng/common/sdl/push-gdn.ps1
index 79c707d6d8a..d8fd2d82a68 100644
--- a/eng/common/sdl/push-gdn.ps1
+++ b/eng/common/sdl/push-gdn.ps1
@@ -1,51 +1,69 @@
Param(
[string] $Repository,
- [string] $BranchName="master",
+ [string] $BranchName='master',
[string] $GdnFolder,
[string] $AzureDevOpsAccessToken,
[string] $PushReason
)
-$ErrorActionPreference = "Stop"
+$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
+$disableConfigureToolsetImport = $true
$LASTEXITCODE = 0
-# We create the temp directory where we'll store the sdl-config repository
-$sdlDir = Join-Path $env:TEMP "sdl"
-if (Test-Path $sdlDir) {
- Remove-Item -Force -Recurse $sdlDir
-}
+try {
+ # `tools.ps1` checks $ci to perform some actions. Since the SDL
+  # scripts don't necessarily execute in the same agent that runs the
+  # build.ps1/sh script, this variable isn't automatically set.
+ $ci = $true
+ . $PSScriptRoot\..\tools.ps1
-Write-Host "git clone https://dnceng:`$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir"
-git clone https://dnceng:$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir
-if ($LASTEXITCODE -ne 0) {
- Write-Error "Git clone failed with exit code $LASTEXITCODE."
-}
-# We copy the .gdn folder from our local run into the git repository so it can be committed
-$sdlRepositoryFolder = Join-Path (Join-Path (Join-Path $sdlDir $Repository) $BranchName) ".gdn"
-if (Get-Command Robocopy) {
- Robocopy /S $GdnFolder $sdlRepositoryFolder
-} else {
- rsync -r $GdnFolder $sdlRepositoryFolder
-}
-# cd to the sdl-config directory so we can run git there
-Push-Location $sdlDir
-# git add . --> git commit --> git push
-Write-Host "git add ."
-git add .
-if ($LASTEXITCODE -ne 0) {
- Write-Error "Git add failed with exit code $LASTEXITCODE."
-}
-Write-Host "git -c user.email=`"dn-bot@microsoft.com`" -c user.name=`"Dotnet Bot`" commit -m `"$PushReason for $Repository/$BranchName`""
-git -c user.email="dn-bot@microsoft.com" -c user.name="Dotnet Bot" commit -m "$PushReason for $Repository/$BranchName"
-if ($LASTEXITCODE -ne 0) {
- Write-Error "Git commit failed with exit code $LASTEXITCODE."
+ # We create the temp directory where we'll store the sdl-config repository
+ $sdlDir = Join-Path $env:TEMP 'sdl'
+ if (Test-Path $sdlDir) {
+ Remove-Item -Force -Recurse $sdlDir
+ }
+
+ Write-Host "git clone https://dnceng:`$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir"
+ git clone https://dnceng:$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git clone failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ # We copy the .gdn folder from our local run into the git repository so it can be committed
+ $sdlRepositoryFolder = Join-Path (Join-Path (Join-Path $sdlDir $Repository) $BranchName) '.gdn'
+ if (Get-Command Robocopy) {
+ Robocopy /S $GdnFolder $sdlRepositoryFolder
+ } else {
+ rsync -r $GdnFolder $sdlRepositoryFolder
+ }
+ # cd to the sdl-config directory so we can run git there
+ Push-Location $sdlDir
+ # git add . --> git commit --> git push
+ Write-Host 'git add .'
+ git add .
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git add failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ Write-Host "git -c user.email=`"dn-bot@microsoft.com`" -c user.name=`"Dotnet Bot`" commit -m `"$PushReason for $Repository/$BranchName`""
+ git -c user.email="dn-bot@microsoft.com" -c user.name="Dotnet Bot" commit -m "$PushReason for $Repository/$BranchName"
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git commit failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ Write-Host 'git push'
+ git push
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git push failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+
+ # Return to the original directory
+ Pop-Location
}
-Write-Host "git push"
-git push
-if ($LASTEXITCODE -ne 0) {
- Write-Error "Git push failed with exit code $LASTEXITCODE."
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
}
-
-# Return to the original directory
-Pop-Location
\ No newline at end of file
diff --git a/eng/common/sdl/run-sdl.ps1 b/eng/common/sdl/run-sdl.ps1
index 9bc25314ae2..fe95ab35aa5 100644
--- a/eng/common/sdl/run-sdl.ps1
+++ b/eng/common/sdl/run-sdl.ps1
@@ -5,55 +5,69 @@ Param(
[string] $GdnFolder,
[string[]] $ToolsList,
[string] $UpdateBaseline,
- [string] $GuardianLoggerLevel="Standard",
+ [string] $GuardianLoggerLevel='Standard',
[string[]] $CrScanAdditionalRunConfigParams,
[string[]] $PoliCheckAdditionalRunConfigParams
)
-$ErrorActionPreference = "Stop"
+$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
+$disableConfigureToolsetImport = $true
$LASTEXITCODE = 0
-# We store config files in the r directory of .gdn
-Write-Host $ToolsList
-$gdnConfigPath = Join-Path $GdnFolder "r"
-$ValidPath = Test-Path $GuardianCliLocation
+try {
+ # `tools.ps1` checks $ci to perform some actions. Since the SDL
+  # scripts don't necessarily execute in the same agent that runs the
+  # build.ps1/sh script, this variable isn't automatically set.
+ $ci = $true
+ . $PSScriptRoot\..\tools.ps1
-if ($ValidPath -eq $False)
-{
- Write-Host "Invalid Guardian CLI Location."
- exit 1
-}
+ # We store config files in the r directory of .gdn
+ Write-Host $ToolsList
+ $gdnConfigPath = Join-Path $GdnFolder 'r'
+ $ValidPath = Test-Path $GuardianCliLocation
-$configParam = @("--config")
-
-foreach ($tool in $ToolsList) {
- $gdnConfigFile = Join-Path $gdnConfigPath "$tool-configure.gdnconfig"
- Write-Host $tool
- # We have to manually configure tools that run on source to look at the source directory only
- if ($tool -eq "credscan") {
- Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" TargetDirectory < $TargetDirectory `" `" OutputType < pre `" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})"
- & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " TargetDirectory < $TargetDirectory " "OutputType < pre" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})
- if ($LASTEXITCODE -ne 0) {
- Write-Host "Guardian configure for $tool failed with exit code $LASTEXITCODE."
- exit $LASTEXITCODE
- }
+ if ($ValidPath -eq $False)
+ {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
+ ExitWithExitCode 1
}
- if ($tool -eq "policheck") {
- Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" Target < $TargetDirectory `" $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})"
- & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " Target < $TargetDirectory " $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})
- if ($LASTEXITCODE -ne 0) {
- Write-Host "Guardian configure for $tool failed with exit code $LASTEXITCODE."
- exit $LASTEXITCODE
+
+ $configParam = @('--config')
+
+ foreach ($tool in $ToolsList) {
+ $gdnConfigFile = Join-Path $gdnConfigPath "$tool-configure.gdnconfig"
+ Write-Host $tool
+ # We have to manually configure tools that run on source to look at the source directory only
+ if ($tool -eq 'credscan') {
+ Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" TargetDirectory < $TargetDirectory `" `" OutputType < pre `" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})"
+ & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " TargetDirectory < $TargetDirectory " "OutputType < pre" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
}
+ if ($tool -eq 'policheck') {
+ Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" Target < $TargetDirectory `" $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})"
+ & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " Target < $TargetDirectory " $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ }
+
+ $configParam+=$gdnConfigFile
}
- $configParam+=$gdnConfigFile
+ Write-Host "$GuardianCliLocation run --working-directory $WorkingDirectory --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam"
+ & $GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian run for $ToolsList using $configParam failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
}
-
-Write-Host "$GuardianCliLocation run --working-directory $WorkingDirectory --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam"
-& $GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam
-if ($LASTEXITCODE -ne 0) {
- Write-Host "Guardian run for $ToolsList using $configParam failed with exit code $LASTEXITCODE."
- exit $LASTEXITCODE
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
}
diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml
index bf09d2511c6..c64c4f5686c 100644
--- a/eng/common/templates/job/execute-sdl.yml
+++ b/eng/common/templates/job/execute-sdl.yml
@@ -1,10 +1,12 @@
parameters:
+ enable: 'false' # Whether the SDL validation job should execute or not
overrideParameters: '' # Optional: to override values for parameters.
additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
# There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
# 'continueOnError', the parameter value is not correctly picked up.
# This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
sdlContinueOnError: false # optional: determines whether to continue the build if the step errors;
+  downloadArtifacts: true # optional: determines if the artifacts should be downloaded
dependsOn: '' # Optional: dependencies of the job
artifactNames: '' # Optional: patterns supplied to DownloadBuildArtifacts
# Usage:
@@ -16,37 +18,53 @@ jobs:
- job: Run_SDL
dependsOn: ${{ parameters.dependsOn }}
displayName: Run SDL tool
+ condition: eq( ${{ parameters.enable }}, 'true')
variables:
- group: DotNet-VSTS-Bot
+ - name: AzDOProjectName
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ - name: AzDOPipelineId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ - name: AzDOBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
pool:
name: Hosted VS2017
steps:
- checkout: self
clean: true
- - ${{ if ne(parameters.artifactNames, '') }}:
- - ${{ each artifactName in parameters.artifactNames }}:
+ - ${{ if ne(parameters.downloadArtifacts, 'false')}}:
+ - ${{ if ne(parameters.artifactNames, '') }}:
+ - ${{ each artifactName in parameters.artifactNames }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Build Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: ${{ artifactName }}
+ downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
+ - ${{ if eq(parameters.artifactNames, '') }}:
- task: DownloadBuildArtifacts@0
displayName: Download Build Artifacts
inputs:
- buildType: current
- artifactName: ${{ artifactName }}
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ downloadType: specific files
+ itemPattern: "**"
downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
- - ${{ if eq(parameters.artifactNames, '') }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Build Artifacts
- inputs:
- buildType: current
- downloadType: specific files
- itemPattern: "**"
- downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
- powershell: eng/common/sdl/extract-artifact-packages.ps1
- -InputPath $(Build.SourcesDirectory)\artifacts\BlobArtifacts
- -ExtractPath $(Build.SourcesDirectory)\artifacts\BlobArtifacts
+ -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
+ -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
displayName: Extract Blob Artifacts
continueOnError: ${{ parameters.sdlContinueOnError }}
- powershell: eng/common/sdl/extract-artifact-packages.ps1
- -InputPath $(Build.SourcesDirectory)\artifacts\PackageArtifacts
- -ExtractPath $(Build.SourcesDirectory)\artifacts\PackageArtifacts
+ -InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
+ -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
displayName: Extract Package Artifacts
continueOnError: ${{ parameters.sdlContinueOnError }}
- task: NuGetToolInstaller@1
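Usage sketch (illustrative, not from this diff): a post-build stage that already contains the setupMaestroVars job (the variables above reference dependencies.setupMaestroVars) can opt into the SDL job roughly like this; the tool list reuses the tools named in run-sdl.ps1:

    # Hypothetical template instantiation; values are placeholders.
    - template: /eng/common/templates/job/execute-sdl.yml
      parameters:
        enable: 'true'
        downloadArtifacts: true
        additionalParameters: '-SourceToolsList @("policheck","credscan")'
        sdlContinueOnError: false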
diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
index ffda80a197b..8b81a7e5143 100644
--- a/eng/common/templates/job/job.yml
+++ b/eng/common/templates/job/job.yml
@@ -1,67 +1,36 @@
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
parameters:
# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
cancelTimeoutInMinutes: ''
-
condition: ''
-
- continueOnError: false
-
container: ''
-
+ continueOnError: false
dependsOn: ''
-
displayName: ''
-
- steps: []
-
pool: ''
-
+ steps: []
strategy: ''
-
timeoutInMinutes: ''
-
variables: []
-
workspace: ''
- # Job base template specific parameters
- # Optional: Enable installing Microbuild plugin
- # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
- # _TeamName - the name of your team
- # _SignType - 'test' or 'real'
+# Job base template specific parameters
+ # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
+ artifacts: ''
enableMicrobuild: false
-
- # Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
-
- # Optional: Enable publishing to the build asset registry
enablePublishBuildAssets: false
-
- # Optional: Prevent gather/push manifest from executing when using publishing pipelines
- enablePublishUsingPipelines: false
-
- # Optional: Include PublishTestResults task
enablePublishTestResults: false
-
- # Optional: enable sending telemetry
- enableTelemetry: false
-
- # Optional: define the helix repo for telemetry (example: 'dotnet/arcade')
- helixRepo: ''
-
- # Optional: define the helix type for telemetry (example: 'build/product/')
- helixType: ''
-
- # Required: name of the job
+ enablePublishUsingPipelines: false
+ useBuildManifest: false
+ mergeTestResults: false
+ testRunTitle: ''
name: ''
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ preSteps: []
runAsPublic: false
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
jobs:
- job: ${{ parameters.name }}
@@ -93,9 +62,12 @@ jobs:
timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
variables:
- - ${{ if eq(parameters.enableTelemetry, 'true') }}:
+ - ${{ if ne(parameters.enableTelemetry, 'false') }}:
- name: DOTNET_CLI_TELEMETRY_PROFILE
value: '$(Build.Repository.Uri)'
+ - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
+ - name: EnableRichCodeNavigation
+ value: 'true'
- ${{ each variable in parameters.variables }}:
# handle name-value variable syntax
# example:
@@ -125,21 +97,12 @@ jobs:
workspace: ${{ parameters.workspace }}
steps:
- - ${{ if eq(parameters.enableTelemetry, 'true') }}:
- # Telemetry tasks are built from https://github.com/dotnet/arcade-extensions
- - task: sendStartTelemetry@0
- displayName: 'Send Helix Start Telemetry'
- inputs:
- helixRepo: ${{ parameters.helixRepo }}
- ${{ if ne(parameters.helixType, '') }}:
- helixType: ${{ parameters.helixType }}
- buildConfig: $(_BuildConfig)
- runAsPublic: ${{ parameters.runAsPublic }}
- continueOnError: ${{ parameters.continueOnError }}
- condition: always()
+ - ${{ if ne(parameters.preSteps, '') }}:
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- task: MicroBuildSigningPlugin@2
displayName: Install MicroBuild plugin
inputs:
@@ -151,12 +114,28 @@ jobs:
continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: NuGetAuthenticate@0
+ - ${{ if or(eq(parameters.artifacts.download, 'true'), ne(parameters.artifacts.download, '')) }}:
+ - task: DownloadPipelineArtifact@2
+ inputs:
+ buildType: current
+ artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
+ targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
+ itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
+
- ${{ each step in parameters.steps }}:
- ${{ step }}
+ - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
+ - task: RichCodeNavIndexer@0
+ displayName: RichCodeNav Upload
+ inputs:
+ languages: 'csharp'
+ environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'prod') }}
+ richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
+ continueOnError: true
+
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: MicroBuildCleanup@1
@@ -166,30 +145,82 @@ jobs:
env:
TeamName: $(_TeamName)
- - ${{ if eq(parameters.enableTelemetry, 'true') }}:
- # Telemetry tasks are built from https://github.com/dotnet/arcade-extensions
- - task: sendEndTelemetry@0
- displayName: 'Send Helix End Telemetry'
- continueOnError: ${{ parameters.continueOnError }}
- condition: always()
-
- - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if or(eq(parameters.artifacts.publish.artifacts, 'true'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - task: CopyFiles@2
+ displayName: Gather binaries for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/bin'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
+ - task: CopyFiles@2
+ displayName: Gather packages for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/packages'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
+ - task: PublishBuildArtifacts@1
+ displayName: Publish pipeline artifacts
+ inputs:
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
+ PublishLocation: Container
+ ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
+ continueOnError: true
+ condition: always()
+ - ${{ if or(eq(parameters.artifacts.publish.logs, 'true'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - publish: artifacts/log
+ artifact: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
+ displayName: Publish logs
+ continueOnError: true
+ condition: always()
+ - ${{ if or(eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
+ - ${{ if and(ne(parameters.enablePublishUsingPipelines, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: CopyFiles@2
+ displayName: Gather Asset Manifests
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/AssetManifests'
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
+
+ - task: PublishBuildArtifacts@1
+ displayName: Push Asset Manifests
+ inputs:
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/AssetManifests'
+ PublishLocation: Container
+ ArtifactName: AssetManifests
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
+
+ - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
- task: PublishBuildArtifacts@1
displayName: Publish Logs
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
PublishLocation: Container
- ArtifactName: $(Agent.Os)_$(Agent.JobName)
+ ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
continueOnError: true
condition: always()
- ${{ if eq(parameters.enablePublishTestResults, 'true') }}:
- task: PublishTestResults@2
- displayName: Publish Test Results
+ displayName: Publish XUnit Test Results
inputs:
testResultsFormat: 'xUnit'
testResultsFiles: '*.xml'
searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
+ mergeTestResults: ${{ parameters.mergeTestResults }}
+ continueOnError: true
+ condition: always()
+ - task: PublishTestResults@2
+ displayName: Publish TRX Test Results
+ inputs:
+ testResultsFormat: 'VSTest'
+ testResultsFiles: '*.trx'
+ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
+ mergeTestResults: ${{ parameters.mergeTestResults }}
continueOnError: true
condition: always()
@@ -210,3 +241,12 @@ jobs:
ArtifactName: AssetManifests
continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
+
+ - ${{ if eq(parameters.useBuildManifest, true) }}:
+ - task: PublishBuildArtifacts@1
+ displayName: Publish Build Manifest
+ inputs:
+ PathToPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/manifest.props'
+ PublishLocation: Container
+ ArtifactName: BuildManifests
+ continueOnError: ${{ parameters.continueOnError }}
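The new artifacts parameter folds the download/publish switches into one object. A hedged sketch of the shape the template appears to accept, using the boolean form (artifact names then come from the coalesce defaults above); pool, steps, and the job name are placeholders:

    # Hypothetical job definition; pool/steps are placeholders.
    - template: /eng/common/templates/job/job.yml
      parameters:
        name: Windows_NT
        pool:
          vmImage: vs2017-win2016
        artifacts:
          download: true        # defaults to Artifacts_$(Agent.OS)_$(_BuildConfig) into ./artifacts
          publish:
            artifacts: true     # gathers artifacts/bin and artifacts/packages
            logs: true
            manifests: true
        steps:
        - script: build.cmd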
diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml
index b722975f9c2..d0c3cc2b3ba 100644
--- a/eng/common/templates/job/publish-build-assets.yml
+++ b/eng/common/templates/job/publish-build-assets.yml
@@ -37,6 +37,12 @@ jobs:
- name: _BuildConfig
value: ${{ parameters.configuration }}
- group: Publish-Build-Assets
+ # Skip component governance and codesign validation for SDL. These jobs
+ # create no content.
+ - name: skipComponentGovernanceDetection
+ value: true
+ - name: runCodesignValidationInjection
+ value: false
steps:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
@@ -61,6 +67,7 @@ jobs:
/p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
/p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
/p:Configuration=$(_BuildConfig)
+ /p:OfficialBuildId=$(Build.BuildNumber)
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
@@ -81,11 +88,6 @@ jobs:
ArtifactName: ReleaseConfigs
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- - task: PublishBuildArtifacts@1
- displayName: Publish Logs to VSTS
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
- PublishLocation: Container
- ArtifactName: $(Agent.Os)_PublishBuildAssets
- continueOnError: true
- condition: always()
+ - template: /eng/common/templates/steps/publish-logs.yml
+ parameters:
+ JobLabel: 'Publish_Artifacts_Logs'
diff --git a/eng/common/templates/job/source-build.yml b/eng/common/templates/job/source-build.yml
new file mode 100644
index 00000000000..9332f5ecc38
--- /dev/null
+++ b/eng/common/templates/job/source-build.yml
@@ -0,0 +1,49 @@
+parameters:
+ # This template adds arcade-powered source-build to CI. The template produces a server job with a
+ # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
+
+  # Specifies the prefix for source-build jobs added to the pipeline. Use this if disambiguation is needed.
+ jobNamePrefix: 'Source_Build'
+
+ # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
+ # managed-only repositories. This is an object with these properties:
+ #
+ # name: ''
+ # The name of the job. This is included in the job ID.
+ # targetRID: ''
+ # The name of the target RID to use, instead of the one auto-detected by Arcade.
+ # nonPortable: false
+ # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
+ # linux-x64), and compiling against distro-provided packages rather than portable ones.
+ # container: ''
+ # A container to use. Runs in docker.
+ # pool: {}
+ # A pool to use. Runs directly on an agent.
+ # buildScript: ''
+ # Specifies the build script to invoke to perform the build in the repo. The default
+ # './build.sh' should work for typical Arcade repositories, but this is customizable for
+ # difficult situations.
+ # jobProperties: {}
+ # A list of job properties to inject at the top level, for potential extensibility beyond
+ # container and pool.
+ platform: {}
+
+jobs:
+- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
+ displayName: Source-Build (${{ parameters.platform.name }})
+
+ ${{ each property in parameters.platform.jobProperties }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ ${{ if ne(parameters.platform.container, '') }}:
+ container: ${{ parameters.platform.container }}
+ ${{ if ne(parameters.platform.pool, '') }}:
+ pool: ${{ parameters.platform.pool }}
+
+ workspace:
+ clean: all
+
+ steps:
+ - template: /eng/common/templates/steps/source-build.yml
+ parameters:
+ platform: ${{ parameters.platform }}
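Consumption sketch (illustrative): a single source-build job for one platform. The container tag simply reuses the default managed image declared in jobs/source-build.yml below, and buildScript/nonPortable show the documented defaults:

    # Hypothetical platform object; values are placeholders.
    - template: /eng/common/templates/job/source-build.yml
      parameters:
        platform:
          name: 'Managed'
          container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-3e800f1-20190501005343'
          buildScript: './build.sh'
          nonPortable: false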
diff --git a/eng/common/templates/jobs/jobs.yml b/eng/common/templates/jobs/jobs.yml
index 6a2f98c036f..08845950f44 100644
--- a/eng/common/templates/jobs/jobs.yml
+++ b/eng/common/templates/jobs/jobs.yml
@@ -1,19 +1,10 @@
parameters:
- # Optional: 'true' if failures in job.yml job should not fail the job
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
continueOnError: false
- # Optional: Enable installing Microbuild plugin
- # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
- # _TeamName - the name of your team
- # _SignType - 'test' or 'real'
- enableMicrobuild: false
-
# Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
- # Optional: Enable publishing to the build asset registry
- enablePublishBuildAssets: false
-
# Optional: Enable publishing using release pipelines
enablePublishUsingPipelines: false
@@ -23,19 +14,9 @@ parameters:
# Optional: Include toolset dependencies in the generated graph files
includeToolset: false
- # Optional: Include PublishTestResults task
- enablePublishTestResults: false
-
- # Optional: enable sending telemetry
- # if enabled then the 'helixRepo' parameter should also be specified
- enableTelemetry: false
-
# Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
jobs: []
- # Optional: define the helix repo for telemetry (example: 'dotnet/arcade')
- helixRepo: ''
-
# Optional: Override automatically derived dependsOn value for "publish build assets" job
publishBuildAssetsDependsOn: ''
@@ -43,6 +24,13 @@ parameters:
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false
+ # Optional: Enable running the source-build jobs to build repo from source
+ runSourceBuild: false
+
+ # Optional: Parameters for source-build template.
+ # See /eng/common/templates/jobs/source-build.yml for options
+ sourceBuildParameters: []
+
# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
# and some (Microbuild) should only be applied to non-PR cases for internal builds.
@@ -62,29 +50,39 @@ jobs:
name: ${{ job.job }}
-- ${{ if and(eq(parameters.enablePublishBuildAssets, true), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - template: ../job/publish-build-assets.yml
- parameters:
- continueOnError: ${{ parameters.continueOnError }}
- dependsOn:
- - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.jobs }}:
- - ${{ job.job }}
- pool:
- vmImage: vs2017-win2016
- runAsPublic: ${{ parameters.runAsPublic }}
- publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
- enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
-
-- ${{ if and(eq(parameters.graphFileGeneration.enabled, true), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - template: ../job/generate-graph-files.yml
+- ${{ if eq(parameters.runSourceBuild, true) }}:
+ - template: /eng/common/templates/jobs/source-build.yml
parameters:
- continueOnError: ${{ parameters.continueOnError }}
- includeToolset: ${{ parameters.graphFileGeneration.includeToolset }}
- dependsOn:
- - Asset_Registry_Publish
- pool:
- vmImage: vs2017-win2016
+ allCompletedJobId: Source_Build_Complete
+ ${{ each parameter in parameters.sourceBuildParameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
+ - template: ../job/publish-build-assets.yml
+ parameters:
+ continueOnError: ${{ parameters.continueOnError }}
+ dependsOn:
+ - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.jobs }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.runSourceBuild, true) }}:
+ - Source_Build_Complete
+ pool:
+ vmImage: vs2017-win2016
+ runAsPublic: ${{ parameters.runAsPublic }}
+ publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
+ enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
+
+ - ${{ if eq(parameters.graphFileGeneration.enabled, true) }}:
+ - template: ../job/generate-graph-files.yml
+ parameters:
+ continueOnError: ${{ parameters.continueOnError }}
+ includeToolset: ${{ parameters.graphFileGeneration.includeToolset }}
+ dependsOn:
+ - Asset_Registry_Publish
+ pool:
+ vmImage: vs2017-win2016
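Opt-in sketch (illustrative): a repo pipeline turns source-build on through jobs.yml roughly as below; because this template passes allCompletedJobId: Source_Build_Complete, later jobs can depend on that single job ID instead of listing every platform job:

    # Hypothetical pipeline fragment; the Windows job is a placeholder.
    - template: /eng/common/templates/jobs/jobs.yml
      parameters:
        enablePublishBuildArtifacts: true
        enablePublishUsingPipelines: true
        runSourceBuild: true
        sourceBuildParameters:
          includeDefaultManagedPlatform: true
        jobs:
        - job: Windows_NT
          pool:
            vmImage: vs2017-win2016
          steps:
          - script: build.cmd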
diff --git a/eng/common/templates/jobs/source-build.yml b/eng/common/templates/jobs/source-build.yml
new file mode 100644
index 00000000000..f463011e793
--- /dev/null
+++ b/eng/common/templates/jobs/source-build.yml
@@ -0,0 +1,48 @@
+parameters:
+ # This template adds arcade-powered source-build to CI. A job is created for each platform, as
+ # well as an optional server job that completes when all platform jobs complete.
+
+ # The name of the "join" job for all source-build platforms. If set to empty string, the job is
+  # not included. Existing repo pipelines can use this job to depend on all source-build jobs
+ # completing without maintaining a separate list of every single job ID: just depend on this one
+ # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
+ allCompletedJobId: ''
+
+ # See /eng/common/templates/job/source-build.yml
+ jobNamePrefix: 'Source_Build'
+
+ # If changed to true, causes this template to include the default platform for a managed-only
+ # repo. The exact Docker image used for this build will be provided by Arcade. This has some risk,
+ # but since the repo is supposed to be managed-only, the risk should be very low.
+ includeDefaultManagedPlatform: false
+ defaultManagedPlatform:
+ name: 'Managed'
+ container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-3e800f1-20190501005343'
+
+ # Defines the platforms on which to run build jobs. One job is created for each platform, and the
+ # object in this array is sent to the job template as 'platform'.
+ platforms: []
+
+jobs:
+
+- ${{ if ne(parameters.allCompletedJobId, '') }}:
+ - job: ${{ parameters.allCompletedJobId }}
+ displayName: Source-Build Complete
+ pool: server
+ dependsOn:
+ - ${{ each platform in parameters.platforms }}:
+ - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
+ - ${{ if eq(parameters.includeDefaultManagedPlatform, true) }}:
+ - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
+
+- ${{ each platform in parameters.platforms }}:
+ - template: /eng/common/templates/job/source-build.yml
+ parameters:
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+ platform: ${{ platform }}
+
+- ${{ if eq(parameters.includeDefaultManagedPlatform, true) }}:
+ - template: /eng/common/templates/job/source-build.yml
+ parameters:
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+ platform: ${{ parameters.defaultManagedPlatform }}
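Direct-use sketch (illustrative): calling this jobs template with an explicit platform list and the join job, then hanging a downstream job off the single join ID; the container tag reuses the default above and the downstream job is hypothetical:

    # Hypothetical pipeline fragment; all values are placeholders.
    - template: /eng/common/templates/jobs/source-build.yml
      parameters:
        allCompletedJobId: Source_Build_Complete
        platforms:
        - name: 'Managed'
          container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-3e800f1-20190501005343'
    - job: AfterSourceBuild
      dependsOn: Source_Build_Complete
      pool:
        vmImage: vs2017-win2016
      steps:
      - script: echo source-build finished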
diff --git a/eng/common/templates/post-build/channels/generic-internal-channel.yml b/eng/common/templates/post-build/channels/generic-internal-channel.yml
index ad9375f5e5c..7ae5255921a 100644
--- a/eng/common/templates/post-build/channels/generic-internal-channel.yml
+++ b/eng/common/templates/post-build/channels/generic-internal-channel.yml
@@ -1,5 +1,10 @@
parameters:
- publishInstallersAndChecksums: false
+ BARBuildId: ''
+ PromoteToChannelIds: ''
+ artifactsPublishingAdditionalParameters: ''
+ dependsOn:
+ - Validate
+ publishInstallersAndChecksums: true
symbolPublishingAdditionalParameters: ''
stageName: ''
channelName: ''
@@ -10,19 +15,28 @@ parameters:
stages:
- stage: ${{ parameters.stageName }}
- dependsOn: validate
+ dependsOn: ${{ parameters.dependsOn }}
variables:
- template: ../common-variables.yml
displayName: ${{ parameters.channelName }} Publishing
jobs:
- template: ../setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- - job:
+ - job: publish_symbols
displayName: Symbol Publishing
dependsOn: setupMaestroVars
- condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.channelId }} ))
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'], format('[{0}]', ${{ parameters.channelId }} ))
variables:
- group: DotNet-Symbol-Server-Pats
+ - name: AzDOProjectName
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ - name: AzDOPipelineId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ - name: AzDOBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
pool:
vmImage: 'windows-2019'
steps:
@@ -31,16 +45,31 @@ stages:
displayName: 'Authenticate to AzDO Feeds'
- task: DownloadBuildArtifacts@0
- displayName: Download Blob Artifacts
- inputs:
- artifactName: 'BlobArtifacts'
+ displayName: Download Build Assets
continueOnError: true
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ downloadType: 'specific'
+ itemPattern: |
+ PdbArtifacts/**
+ BlobArtifacts/**
+ downloadPath: '$(Build.ArtifactStagingDirectory)'
- - task: DownloadBuildArtifacts@0
- displayName: Download PDB Artifacts
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
+ # otherwise it'll complain about accessing a private feed.
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
inputs:
- artifactName: 'PDBArtifacts'
- continueOnError: true
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
- task: PowerShell@2
displayName: Publish
@@ -53,39 +82,48 @@ stages:
/p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
/p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
/p:Configuration=Release
+ /p:PublishToMSDL=false
${{ parameters.symbolPublishingAdditionalParameters }}
+ - template: ../../steps/publish-logs.yml
+ parameters:
+ StageLabel: '${{ parameters.stageName }}'
+ JobLabel: 'SymbolPublishing'
+
- job: publish_assets
displayName: Publish Assets
dependsOn: setupMaestroVars
+ timeoutInMinutes: 120
variables:
- - group: DotNet-Blob-Feed
- - group: AzureDevOps-Artifact-Feeds-Pats
- name: BARBuildId
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
- name: IsStableBuild
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ]
- condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.channelId }}))
+ - name: AzDOProjectName
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ - name: AzDOPipelineId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ - name: AzDOBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'], format('[{0}]', ${{ parameters.channelId }} ))
pool:
vmImage: 'windows-2019'
steps:
- task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: current
- artifactName: PackageArtifacts
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Blob Artifacts
- inputs:
- buildType: current
- artifactName: BlobArtifacts
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Asset Manifests
+ displayName: Download Build Assets
+ continueOnError: true
inputs:
- buildType: current
- artifactName: AssetManifests
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ downloadType: 'specific'
+ itemPattern: |
+ PackageArtifacts/**
+ BlobArtifacts/**
+ AssetManifests/**
+ downloadPath: '$(Build.ArtifactStagingDirectory)'
- task: NuGetToolInstaller@1
displayName: 'Install NuGet.exe'
@@ -105,6 +143,7 @@ stages:
inputs:
filePath: eng\common\sdk-task.ps1
arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet
+ /p:PublishingInfraVersion=2
/p:IsStableBuild=$(IsStableBuild)
/p:IsInternalBuild=$(IsInternalBuild)
/p:RepositoryName=$(Build.Repository.Name)
@@ -119,12 +158,11 @@ stages:
/p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
/p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/'
/p:Configuration=Release
- /p:PublishInstallersAndChecksums=true
+ /p:PublishInstallersAndChecksums=${{ parameters.publishInstallersAndChecksums }}
/p:ChecksumsTargetStaticFeed=$(InternalChecksumsBlobFeedUrl)
/p:ChecksumsAzureAccountKey=$(InternalChecksumsBlobFeedKey)
/p:InstallersTargetStaticFeed=$(InternalInstallersBlobFeedUrl)
/p:InstallersAzureAccountKey=$(InternalInstallersBlobFeedKey)
- /p:PublishToAzureDevOpsNuGetFeeds=true
/p:AzureDevOpsStaticShippingFeed='${{ parameters.shippingFeed }}'
/p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
/p:AzureDevOpsStaticTransportFeed='${{ parameters.transportFeed }}'
@@ -134,6 +172,11 @@ stages:
/p:PublishToMSDL=false
${{ parameters.artifactsPublishingAdditionalParameters }}
- - template: ../../steps/promote-build.yml
+ - template: ../../steps/publish-logs.yml
+ parameters:
+ StageLabel: '${{ parameters.stageName }}'
+ JobLabel: 'AssetsPublishing'
+
+ - template: ../../steps/add-build-to-channel.yml
parameters:
ChannelId: ${{ parameters.channelId }}
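
Both publishing jobs in this template are gated on the TargetChannels output of setupMaestroVars, and the format('[{0}]', channelId) comparison matches a whole bracketed channel ID rather than a substring. The sketch below illustrates that gating pattern in isolation; the concrete TargetChannels value ('[131][2]') and the placeholder steps are assumptions for illustration, not taken from this diff.

jobs:
- job: setupMaestroVars
  steps:
  - powershell: |
      # Assumed value for illustration; the real step derives this from Maestro/BAR.
      Write-Host "##vso[task.setvariable variable=TargetChannels;isOutput=true][131][2]"
    name: setReleaseVars
- job: publish_symbols_example
  dependsOn: setupMaestroVars
  # Runs only when channel 131 is a promotion target; '[13]' would not match '[131]'.
  condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'], format('[{0}]', 131))
  steps:
  - script: echo Channel 131 is one of the target channels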
diff --git a/eng/common/templates/post-build/channels/generic-public-channel.yml b/eng/common/templates/post-build/channels/generic-public-channel.yml
index c4bc1897d81..6cf39dbb290 100644
--- a/eng/common/templates/post-build/channels/generic-public-channel.yml
+++ b/eng/common/templates/post-build/channels/generic-public-channel.yml
@@ -1,6 +1,10 @@
parameters:
+ BARBuildId: ''
+ PromoteToChannelIds: ''
artifactsPublishingAdditionalParameters: ''
- publishInstallersAndChecksums: false
+ dependsOn:
+ - Validate
+ publishInstallersAndChecksums: true
symbolPublishingAdditionalParameters: ''
stageName: ''
channelName: ''
@@ -8,36 +12,50 @@ parameters:
transportFeed: ''
shippingFeed: ''
symbolsFeed: ''
+ # If akaMSChannelName is empty, no aka.ms links will be generated
+ akaMSChannelName: ''
stages:
- stage: ${{ parameters.stageName }}
- dependsOn: validate
+ dependsOn: ${{ parameters.dependsOn }}
variables:
- template: ../common-variables.yml
displayName: ${{ parameters.channelName }} Publishing
jobs:
- template: ../setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- - job:
+ - job: publish_symbols
displayName: Symbol Publishing
dependsOn: setupMaestroVars
- condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.channelId }} ))
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'], format('[{0}]', ${{ parameters.channelId }} ))
variables:
- group: DotNet-Symbol-Server-Pats
+ - name: AzDOProjectName
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ - name: AzDOPipelineId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ - name: AzDOBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
pool:
vmImage: 'windows-2019'
steps:
- task: DownloadBuildArtifacts@0
- displayName: Download Blob Artifacts
- inputs:
- artifactName: 'BlobArtifacts'
+ displayName: Download Build Assets
continueOnError: true
-
- - task: DownloadBuildArtifacts@0
- displayName: Download PDB Artifacts
inputs:
- artifactName: 'PDBArtifacts'
- continueOnError: true
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ downloadType: 'specific'
+ itemPattern: |
+ PdbArtifacts/**
+ BlobArtifacts/**
+ downloadPath: '$(Build.ArtifactStagingDirectory)'
# This is necessary whenever we want to publish/restore to an AzDO private feed
# Since sdk-task.ps1 tries to restore packages we need to do this authentication here
@@ -64,37 +82,47 @@ stages:
/p:Configuration=Release
${{ parameters.symbolPublishingAdditionalParameters }}
+ - template: ../../steps/publish-logs.yml
+ parameters:
+ StageLabel: '${{ parameters.stageName }}'
+ JobLabel: 'SymbolPublishing'
+
- job: publish_assets
displayName: Publish Assets
dependsOn: setupMaestroVars
+ timeoutInMinutes: 120
variables:
- name: BARBuildId
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
- name: IsStableBuild
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ]
- condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.channelId }}))
+ - name: AzDOProjectName
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ - name: AzDOPipelineId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ - name: AzDOBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
+ - name: ArtifactsCategory
+ value: ${{ coalesce(variables._DotNetArtifactsCategory, '.NETCore') }}
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'], format('[{0}]', ${{ parameters.channelId }} ))
pool:
vmImage: 'windows-2019'
steps:
- task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: current
- artifactName: PackageArtifacts
+ displayName: Download Build Assets
continueOnError: true
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Blob Artifacts
inputs:
- buildType: current
- artifactName: BlobArtifacts
- continueOnError: true
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Asset Manifests
- inputs:
- buildType: current
- artifactName: AssetManifests
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ downloadType: 'specific'
+ itemPattern: |
+ PackageArtifacts/**
+ BlobArtifacts/**
+ AssetManifests/**
+ downloadPath: '$(Build.ArtifactStagingDirectory)'
- task: NuGetToolInstaller@1
displayName: 'Install NuGet.exe'
@@ -114,7 +142,8 @@ stages:
inputs:
filePath: eng\common\sdk-task.ps1
arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet
- /p:ArtifactsCategory=$(_DotNetArtifactsCategory)
+ /p:PublishingInfraVersion=2
+ /p:ArtifactsCategory=$(ArtifactsCategory)
/p:IsStableBuild=$(IsStableBuild)
/p:IsInternalBuild=$(IsInternalBuild)
/p:RepositoryName=$(Build.Repository.Name)
@@ -134,15 +163,22 @@ stages:
/p:InstallersAzureAccountKey=$(dotnetcli-storage-key)
/p:ChecksumsTargetStaticFeed=$(ChecksumsBlobFeedUrl)
/p:ChecksumsAzureAccountKey=$(dotnetclichecksums-storage-key)
- /p:PublishToAzureDevOpsNuGetFeeds=true
/p:AzureDevOpsStaticShippingFeed='${{ parameters.shippingFeed }}'
/p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
/p:AzureDevOpsStaticTransportFeed='${{ parameters.transportFeed }}'
/p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
/p:AzureDevOpsStaticSymbolsFeed='${{ parameters.symbolsFeed }}'
/p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:LatestLinkShortUrlPrefix=dotnet/'${{ parameters.akaMSChannelName }}'
+ /p:AkaMSClientId=$(akams-client-id)
+ /p:AkaMSClientSecret=$(akams-client-secret)
${{ parameters.artifactsPublishingAdditionalParameters }}
- - template: ../../steps/promote-build.yml
+ - template: ../../steps/publish-logs.yml
+ parameters:
+ StageLabel: '${{ parameters.stageName }}'
+ JobLabel: 'AssetsPublishing'
+
+ - template: ../../steps/add-build-to-channel.yml
parameters:
ChannelId: ${{ parameters.channelId }}
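
The new akaMSChannelName parameter is what drives aka.ms "latest" link creation: when it is non-empty it is forwarded to the publish task as /p:LatestLinkShortUrlPrefix=dotnet/'<akaMSChannelName>' together with the aka.ms client credentials, and when it is empty no links are generated. Below is a minimal sketch of a pipeline instantiating this template; post-build.yml (later in this diff) is the real consumer, and the Build stage, stage name, and akaMSChannelName value here are hypothetical, while channel 529 and the general-testing feeds come from this diff.

stages:
- stage: Build
  jobs:
  - job: build
    steps:
    - script: echo build placeholder
- template: eng/common/templates/post-build/channels/generic-public-channel.yml
  parameters:
    dependsOn:
    - Build
    stageName: 'Example_Publish'
    channelName: 'General Testing'
    channelId: 529                     # General Testing channel, per this diff
    akaMSChannelName: 'example/daily'  # hypothetical; surfaces as /p:LatestLinkShortUrlPrefix=dotnet/'example/daily'
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing-symbols/nuget/v3/index.json'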
diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml
index 1883d2b1700..c99fd750376 100644
--- a/eng/common/templates/post-build/common-variables.yml
+++ b/eng/common/templates/post-build/common-variables.yml
@@ -4,13 +4,13 @@ variables:
- group: DotNet-DotNetCli-Storage
- group: DotNet-MSRC-Storage
- group: Publish-Build-Assets
-
+
# .NET Core 3.1 Dev
- name: PublicDevRelease_31_Channel_Id
value: 128
- # .NET Core 5 Dev
- - name: NetCore_5_Dev_Channel_Id
+ # .NET 5 Dev
+ - name: Net_5_Dev_Channel_Id
value: 131
# .NET Eng - Validation
@@ -29,6 +29,14 @@ variables:
- name: NetCore_3_Tools_Channel_Id
value: 344
+ # .NET Core 3.0 Internal Servicing
+ - name: InternalServicing_30_Channel_Id
+ value: 184
+
+ # .NET Core 3.0 Release
+ - name: PublicRelease_30_Channel_Id
+ value: 19
+
# .NET Core 3.1 Release
- name: PublicRelease_31_Channel_Id
value: 129
@@ -41,6 +49,10 @@ variables:
- name: NetCore_31_Blazor_Features_Channel_Id
value: 531
+ # .NET Core Experimental
+ - name: NetCore_Experimental_Channel_Id
+ value: 562
+
# Whether the build is internal or not
- name: IsInternalBuild
value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
@@ -51,7 +63,7 @@ variables:
- name: MaestroApiAccessToken
value: $(MaestroAccessToken)
- name: MaestroApiVersion
- value: "2019-01-16"
+ value: "2020-02-20"
- name: SourceLinkCLIVersion
value: 3.0.0
@@ -78,3 +90,10 @@ variables:
value: https://dotnetclimsrc.blob.core.windows.net/dotnet/index.json
- name: InternalInstallersBlobFeedKey
value: $(dotnetclimsrc-access-key)
+
+ # Skip component governance and codesign validation for SDL. These jobs
+ # create no content.
+ - name: skipComponentGovernanceDetection
+ value: true
+ - name: runCodesignValidationInjection
+ value: false
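
Because common-variables.yml is pulled in with a variables template include, everything it defines (channel IDs, the Maestro API version, the SDL opt-out switches) becomes an ordinary pipeline variable for the jobs in that stage. A minimal sketch, assuming the variable groups it references are accessible to the pipeline; the job itself is hypothetical:

variables:
- template: eng/common/templates/post-build/common-variables.yml
jobs:
- job: show_maestro_settings
  pool:
    vmImage: 'windows-2019'
  steps:
  - powershell: |
      Write-Host "Maestro API version: $(MaestroApiVersion)"    # '2020-02-20' after this change
      Write-Host ".NET 5 Dev channel:  $(Net_5_Dev_Channel_Id)" # 131
    displayName: Show Maestro settings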
diff --git a/eng/common/templates/post-build/darc-gather-drop.yml b/eng/common/templates/post-build/darc-gather-drop.yml
deleted file mode 100644
index 3268ccaa551..00000000000
--- a/eng/common/templates/post-build/darc-gather-drop.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-parameters:
- ChannelId: 0
-
-jobs:
-- job: gatherDrop
- displayName: Gather Drop
- dependsOn: setupMaestroVars
- condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.ChannelId }}))
- variables:
- - name: BARBuildId
- value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
- pool:
- vmImage: 'windows-2019'
- steps:
- - task: PowerShell@2
- displayName: Darc gather-drop
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/darc-gather-drop.ps1
- arguments: -BarBuildId $(BARBuildId)
- -DropLocation $(Agent.BuildDirectory)/Temp/Drop/
- -MaestroApiAccessToken $(MaestroApiAccessToken)
- -MaestroApiEndPoint $(MaestroApiEndPoint)
- -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml
index 8ad0f9f66c2..0854e489615 100644
--- a/eng/common/templates/post-build/post-build.yml
+++ b/eng/common/templates/post-build/post-build.yml
@@ -1,392 +1,606 @@
parameters:
+ # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
+ # Publishing V2 optionally allows the publishing stages to be outlined; the default is inline.
+ # Publishing V3 DOES NOT accept inlining the publishing stages.
+ publishingInfraVersion: 2
+ # When set to true, the publishing templates from the repo will be used;
+ # otherwise, Darc add-build-to-channel will be used to trigger the promotion pipeline.
+ inline: true
+
+ # Only used if inline == false. When set to true, the current build will wait until
+ # the Promotion Pipeline build finishes. Otherwise, the current build will continue
+ # executing concurrently with the promotion build.
+ waitPublishingFinish: true
+
+ BARBuildId: ''
+ PromoteToChannelIds: ''
+
enableSourceLinkValidation: false
enableSigningValidation: true
enableSymbolValidation: false
enableNugetValidation: true
- publishInstallersAndChecksums: false
+ publishInstallersAndChecksums: true
SDLValidationParameters:
enable: false
continueOnError: false
params: ''
artifactNames: ''
+ downloadArtifacts: true
# These parameters let the user customize the call to sdk-task.ps1 for publishing
# symbols & general artifacts as well as for signing validation
symbolPublishingAdditionalParameters: ''
artifactsPublishingAdditionalParameters: ''
signingValidationAdditionalParameters: ''
+ useBuildManifest: false
# Which stages should finish execution before post-build stages start
- dependsOn: [build]
-
+ validateDependsOn:
+ - build
+ publishDependsOn:
+ - Validate
+
+ # Channel IDs instantiated in this file.
+ # When adding a new channel implementation, the call to `check-channel-consistency.ps1`
+ # needs to be updated with the new channel ID.
+ NetEngLatestChannelId: 2
+ NetEngValidationChannelId: 9
+ NetDev5ChannelId: 131
+ NetDev6ChannelId: 1296
+ GeneralTestingChannelId: 529
+ NETCoreToolingDevChannelId: 548
+ NETCoreToolingReleaseChannelId: 549
+ NETInternalToolingChannelId: 551
+ NETCoreExperimentalChannelId: 562
+ NetEngServicesIntChannelId: 678
+ NetEngServicesProdChannelId: 679
+ Net5Preview8ChannelId: 1155
+ Net5RC1ChannelId: 1157
+ Net5RC2ChannelId: 1329
+ NetCoreSDK313xxChannelId: 759
+ NetCoreSDK313xxInternalChannelId: 760
+ NetCoreSDK314xxChannelId: 921
+ NetCoreSDK314xxInternalChannelId: 922
+ VS166ChannelId: 1010
+ VS167ChannelId: 1011
+ VS168ChannelId: 1154
+ VSMasterChannelId: 1012
+
stages:
-- stage: validate
- dependsOn: ${{ parameters.dependsOn }}
- displayName: Validate
+- stage: Validate
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Validate Build Assets
+ variables:
+ - template: common-variables.yml
jobs:
- - ${{ if eq(parameters.enableNugetValidation, 'true') }}:
+ - template: setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - ${{ if and(le(parameters.publishingInfraVersion, 2), eq(parameters.inline, 'true')) }}:
- job:
- displayName: NuGet Validation
+ displayName: Post-build Checks
+ dependsOn: setupMaestroVars
+ variables:
+ - name: TargetChannels
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'] ]
pool:
vmImage: 'windows-2019'
steps:
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: current
- artifactName: PackageArtifacts
-
- task: PowerShell@2
- displayName: Validate
+ displayName: Maestro Channels Consistency
inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
- arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
- -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
-
- - ${{ if eq(parameters.enableSigningValidation, 'true') }}:
- - job:
- displayName: Signing Validation
- pool:
- vmImage: 'windows-2019'
- steps:
- # This is necessary whenever we want to publish/restore to an AzDO private feed
- # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
- # otherwise it'll complain about accessing a private feed.
- - task: NuGetAuthenticate@0
- displayName: 'Authenticate to AzDO Feeds'
-
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/check-channel-consistency.ps1
+ arguments: -PromoteToChannels "$(TargetChannels)"
+ -AvailableChannelIds ${{parameters.NetEngLatestChannelId}},${{parameters.NetEngValidationChannelId}},${{parameters.NetDev5ChannelId}},${{parameters.NetDev6ChannelId}},${{parameters.GeneralTestingChannelId}},${{parameters.NETCoreToolingDevChannelId}},${{parameters.NETCoreToolingReleaseChannelId}},${{parameters.NETInternalToolingChannelId}},${{parameters.NETCoreExperimentalChannelId}},${{parameters.NetEngServicesIntChannelId}},${{parameters.NetEngServicesProdChannelId}},${{parameters.Net5Preview8ChannelId}},${{parameters.Net5RC1ChannelId}},${{parameters.Net5RC2ChannelId}},${{parameters.NetCoreSDK313xxChannelId}},${{parameters.NetCoreSDK313xxInternalChannelId}},${{parameters.NetCoreSDK314xxChannelId}},${{parameters.NetCoreSDK314xxInternalChannelId}},${{parameters.VS166ChannelId}},${{parameters.VS167ChannelId}},${{parameters.VS168ChannelId}},${{parameters.VSMasterChannelId}}
+
+ - job:
+ displayName: NuGet Validation
+ dependsOn: setupMaestroVars
+ condition: eq( ${{ parameters.enableNugetValidation }}, 'true')
+ pool:
+ vmImage: 'windows-2019'
+ variables:
+ - name: AzDOProjectName
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ - name: AzDOPipelineId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ - name: AzDOBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
+ arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
+ -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
+
+ - job:
+ displayName: Signing Validation
+ dependsOn: setupMaestroVars
+ condition: eq( ${{ parameters.enableSigningValidation }}, 'true')
+ variables:
+ - template: common-variables.yml
+ - name: AzDOProjectName
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ - name: AzDOPipelineId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ - name: AzDOBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - ${{ if eq(parameters.useBuildManifest, true) }}:
- task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
+ displayName: Download build manifest
inputs:
- buildType: current
- artifactName: PackageArtifacts
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: BuildManifests
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed.
+ # Since sdk-task.ps1 tries to restore packages, we need to do this authentication here;
+ # otherwise it'll complain about accessing a private feed.
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ # Signing validation will optionally work with the build manifest file, which is downloaded
+ # from Azure DevOps above.
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task SigningValidation -restore -msbuildEngine vs
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
+ /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
+ ${{ parameters.signingValidationAdditionalParameters }}
+
+ - template: ../steps/publish-logs.yml
+ parameters:
+ StageLabel: 'Validation'
+ JobLabel: 'Signing'
+
+ - job:
+ displayName: SourceLink Validation
+ dependsOn: setupMaestroVars
+ condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
+ variables:
+ - template: common-variables.yml
+ - name: AzDOProjectName
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ - name: AzDOPipelineId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ - name: AzDOBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: BlobArtifacts
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
+ arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
+ -ExtractPath $(Agent.BuildDirectory)/Extract/
+ -GHRepoName $(Build.Repository.Name)
+ -GHCommit $(Build.SourceVersion)
+ -SourcelinkCliVersion $(SourceLinkCLIVersion)
+ continueOnError: true
+
+ - template: /eng/common/templates/job/execute-sdl.yml
+ parameters:
+ enable: ${{ parameters.SDLValidationParameters.enable }}
+ dependsOn: setupMaestroVars
+ additionalParameters: ${{ parameters.SDLValidationParameters.params }}
+ continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }}
+ artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }}
+ downloadArtifacts: ${{ parameters.SDLValidationParameters.downloadArtifacts }}
+
+- ${{ if or(ge(parameters.publishingInfraVersion, 3), eq(parameters.inline, 'false')) }}:
+ - stage: publish_using_darc
+ dependsOn: Validate
+ displayName: Publish using Darc
+ variables:
+ - template: common-variables.yml
+ jobs:
+ - template: setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: eng\common\sdk-task.ps1
- arguments: -task SigningValidation -restore -msbuildEngine dotnet
- /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
- /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
- /p:Configuration=Release
- ${{ parameters.signingValidationAdditionalParameters }}
-
- - ${{ if eq(parameters.enableSourceLinkValidation, 'true') }}:
- job:
- displayName: SourceLink Validation
+ displayName: Publish Using Darc
+ dependsOn: setupMaestroVars
variables:
- - template: common-variables.yml
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
pool:
vmImage: 'windows-2019'
steps:
- - task: DownloadBuildArtifacts@0
- displayName: Download Blob Artifacts
- inputs:
- buildType: current
- artifactName: BlobArtifacts
-
- task: PowerShell@2
- displayName: Validate
+ displayName: Publish Using Darc
inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
- arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
- -ExtractPath $(Agent.BuildDirectory)/Extract/
- -GHRepoName $(Build.Repository.Name)
- -GHCommit $(Build.SourceVersion)
- -SourcelinkCliVersion $(SourceLinkCLIVersion)
- continueOnError: true
-
- - ${{ if eq(parameters.SDLValidationParameters.enable, 'true') }}:
- - template: /eng/common/templates/job/execute-sdl.yml
- parameters:
- additionalParameters: ${{ parameters.SDLValidationParameters.params }}
- continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }}
- artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }}
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NetCore_Dev31_Publish'
- channelName: '.NET Core 3.1 Dev'
- channelId: 128
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'Net_Eng_Latest_Publish'
- channelName: '.NET Eng - Latest'
- channelId: 2
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'Net_Eng_Validation_Publish'
- channelName: '.NET Eng - Validation'
- channelId: 9
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NetCore_3_Tools_Validation_Publish'
- channelName: '.NET 3 Tools - Validation'
- channelId: 390
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NetCore_3_Tools_Publish'
- channelName: '.NET 3 Tools'
- channelId: 344
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NetCore_Release31_Publish'
- channelName: '.NET Core 3.1 Release'
- channelId: 129
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NetCore_Blazor31_Features_Publish'
- channelName: '.NET Core 3.1 Blazor Features'
- channelId: 531
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-blazor/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-blazor/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-blazor-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NetCore_31_Internal_Servicing_Publishing'
- channelName: '.NET Core 3.1 Internal Servicing'
- channelId: 550
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'General_Testing_Publish'
- channelName: 'General Testing'
- channelId: 529
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NETCore_Tooling_Dev_Publishing'
- channelName: '.NET Core Tooling Dev'
- channelId: 548
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NETCore_Tooling_Release_Publishing'
- channelName: '.NET Core Tooling Release'
- channelId: 549
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NETCore_SDK_311xx_Publishing'
- channelName: '.NET Core SDK 3.1.1xx'
- channelId: 560
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NETCore_SDK_311xx_Internal_Publishing'
- channelName: '.NET Core SDK 3.1.1xx Internal'
- channelId: 559
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NETCore_SDK_312xx_Publishing'
- channelName: '.NET Core SDK 3.1.2xx'
- channelId: 558
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NETCore_SDK_312xx_Internal_Publishing'
- channelName: '.NET Core SDK 3.1.2xx Internal'
- channelId: 557
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NETCore_SDK_313xx_Publishing'
- channelName: '.NET Core SDK 3.1.3xx'
- channelId: 759
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NETCore_SDK_313xx_Internal_Publishing'
- channelName: '.NET Core SDK 3.1.3xx Internal'
- channelId: 760
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NETCore_SDK_314xx_Publishing'
- channelName: '.NET Core SDK 3.1.4xx'
- channelId: 921
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NETCore_SDK_314xx_Internal_Publishing'
- channelName: '.NET Core SDK 3.1.4xx Internal'
- channelId: 922
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'VS16_6_Publishing'
- channelName: 'VS 16.6'
- channelId: 1010
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'VS16_7_Publishing'
- channelName: 'VS 16.7'
- channelId: 1011
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'VS16_8_Publishing'
- channelName: 'VS 16.8'
- channelId: 1154
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'VS16_9_Publishing'
- channelName: 'VS 16.9'
- channelId: 1473
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
-
-- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
- parameters:
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'VS_Master_Publishing'
- channelName: 'VS Master'
- channelId: 1012
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
\ No newline at end of file
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
+ arguments: -BuildId $(BARBuildId)
+ -PublishingInfraVersion ${{ parameters.PublishingInfraVersion }}
+ -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
+ -MaestroToken '$(MaestroApiAccessToken)'
+ -WaitPublishingFinish ${{ parameters.waitPublishingFinish }}
+ -PublishInstallersAndChecksums ${{ parameters.publishInstallersAndChecksums }}
+
+- ${{ if and(le(parameters.publishingInfraVersion, 2), eq(parameters.inline, 'true')) }}:
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NetCore_Dev5_Publish'
+ channelName: '.NET 5 Dev'
+ akaMSChannelName: 'net5/dev'
+ channelId: ${{ parameters.NetDev5ChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NetCore_Dev6_Publish'
+ channelName: '.NET 6 Dev'
+ akaMSChannelName: 'net6/dev'
+ channelId: ${{ parameters.NetDev6ChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet6-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet6/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet6-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'Net5_Preview8_Publish'
+ channelName: '.NET 5 Preview 8'
+ akaMSChannelName: 'net5/preview8'
+ channelId: ${{ parameters.Net5Preview8ChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'Net5_RC1_Publish'
+ channelName: '.NET 5 RC 1'
+ akaMSChannelName: 'net5/rc1'
+ channelId: ${{ parameters.Net5RC1ChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'Net5_RC2_Publish'
+ channelName: '.NET 5 RC 2'
+ akaMSChannelName: 'net5/rc2'
+ channelId: ${{ parameters.Net5RC2ChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'Net_Eng_Latest_Publish'
+ channelName: '.NET Eng - Latest'
+ akaMSChannelName: 'eng/daily'
+ channelId: ${{ parameters.NetEngLatestChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'Net_Eng_Validation_Publish'
+ channelName: '.NET Eng - Validation'
+ akaMSChannelName: 'eng/validation'
+ channelId: ${{ parameters.NetEngValidationChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'General_Testing_Publish'
+ channelName: 'General Testing'
+ akaMSChannelName: 'generaltesting'
+ channelId: ${{ parameters.GeneralTestingChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_Tooling_Dev_Publishing'
+ channelName: '.NET Core Tooling Dev'
+ channelId: ${{ parameters.NETCoreToolingDevChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_Tooling_Release_Publishing'
+ channelName: '.NET Core Tooling Release'
+ channelId: ${{ parameters.NETCoreToolingReleaseChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NET_Internal_Tooling_Publishing'
+ channelName: '.NET Internal Tooling'
+ channelId: ${{ parameters.NETInternalToolingChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_Experimental_Publishing'
+ channelName: '.NET Core Experimental'
+ channelId: ${{ parameters.NETCoreExperimentalChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'Net_Eng_Services_Int_Publish'
+ channelName: '.NET Eng Services - Int'
+ channelId: ${{ parameters.NetEngServicesIntChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'Net_Eng_Services_Prod_Publish'
+ channelName: '.NET Eng Services - Prod'
+ channelId: ${{ parameters.NetEngServicesProdChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_SDK_314xx_Publishing'
+ channelName: '.NET Core SDK 3.1.4xx'
+ channelId: ${{ parameters.NetCoreSDK314xxChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_SDK_314xx_Internal_Publishing'
+ channelName: '.NET Core SDK 3.1.4xx Internal'
+ channelId: ${{ parameters.NetCoreSDK314xxInternalChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_SDK_313xx_Publishing'
+ channelName: '.NET Core SDK 3.1.3xx'
+ channelId: ${{ parameters.NetCoreSDK313xxChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_SDK_313xx_Internal_Publishing'
+ channelName: '.NET Core SDK 3.1.3xx Internal'
+ channelId: ${{ parameters.NetCoreSDK313xxInternalChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'VS16_6_Publishing'
+ channelName: 'VS 16.6'
+ channelId: ${{ parameters.VS166ChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'VS16_7_Publishing'
+ channelName: 'VS 16.7'
+ channelId: ${{ parameters.VS167ChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'VS16_8_Publishing'
+ channelName: 'VS 16.8'
+ channelId: ${{ parameters.VS168ChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'VS_Master_Publishing'
+ channelName: 'VS Master'
+ channelId: ${{ parameters.VSMasterChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
diff --git a/eng/common/templates/post-build/promote-build.yml b/eng/common/templates/post-build/promote-build.yml
deleted file mode 100644
index 6b479c3b82a..00000000000
--- a/eng/common/templates/post-build/promote-build.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-parameters:
- ChannelId: 0
-
-jobs:
-- job:
- displayName: Promote Build
- dependsOn: setupMaestroVars
- condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.ChannelId }}))
- variables:
- - name: BARBuildId
- value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
- - name: ChannelId
- value: ${{ parameters.ChannelId }}
- pool:
- vmImage: 'windows-2019'
- steps:
- - task: PowerShell@2
- displayName: Add Build to Channel
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/promote-build.ps1
- arguments: -BuildId $(BARBuildId)
- -ChannelId $(ChannelId)
- -MaestroApiAccessToken $(MaestroApiAccessToken)
- -MaestroApiEndPoint $(MaestroApiEndPoint)
- -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/post-build/setup-maestro-vars.yml b/eng/common/templates/post-build/setup-maestro-vars.yml
index 56242b068e1..d0cbfb6c6ff 100644
--- a/eng/common/templates/post-build/setup-maestro-vars.yml
+++ b/eng/common/templates/post-build/setup-maestro-vars.yml
@@ -1,18 +1,77 @@
+parameters:
+ BARBuildId: ''
+ PromoteToChannelIds: ''
+
jobs:
- job: setupMaestroVars
displayName: Setup Maestro Vars
+ variables:
+ - template: common-variables.yml
pool:
vmImage: 'windows-2019'
steps:
- - task: DownloadBuildArtifacts@0
- displayName: Download Release Configs
- inputs:
- buildType: current
- artifactName: ReleaseConfigs
+ - checkout: none
+
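+  # ReleaseConfigs is only needed when no explicit channel IDs were passed in;
+  # otherwise the script below pulls the build information from the Maestro API.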
+ - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Release Configs
+ inputs:
+ buildType: current
+ artifactName: ReleaseConfigs
- task: PowerShell@2
name: setReleaseVars
displayName: Set Release Configs Vars
inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/setup-maestro-vars.ps1
- arguments: -ReleaseConfigsPath '$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt'
+ targetType: inline
+ script: |
+ try {
+ if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
+ $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
+
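+                # ReleaseConfigs.txt layout: line 1 = BAR build id, line 2 = initial channels, line 3 = IsStableBuild.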
+ $BarId = $Content | Select -Index 0
+ $Channels = $Content | Select -Index 1
+ $IsStableBuild = $Content | Select -Index 2
+
+ $AzureDevOpsProject = $Env:System_TeamProject
+ $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
+ $AzureDevOpsBuildId = $Env:Build_BuildId
+ }
+ else {
+ $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"
+
+ $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
+ $apiHeaders.Add('Accept', 'application/json')
+ $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")
+
+ $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+
+ $BarId = $Env:BARBuildId
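+                # Format the channel list as "[id1][id2]..." so that stage conditions can
+                # match an exact channel id with contains(..., format('[{0}]', <channelId>)).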
+ $Channels = $Env:PromoteToMaestroChannels -split ","
+ $Channels = $Channels -join "]["
+ $Channels = "[$Channels]"
+
+ $IsStableBuild = $buildInfo.stable
+ $AzureDevOpsProject = $buildInfo.azureDevOpsProject
+ $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
+ $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
+ }
+
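+              # Export the computed values as job output variables for downstream stages.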
+ Write-Host "##vso[task.setvariable variable=BARBuildId;isOutput=true]$BarId"
+ Write-Host "##vso[task.setvariable variable=TargetChannels;isOutput=true]$Channels"
+ Write-Host "##vso[task.setvariable variable=IsStableBuild;isOutput=true]$IsStableBuild"
+
+ Write-Host "##vso[task.setvariable variable=AzDOProjectName;isOutput=true]$AzureDevOpsProject"
+ Write-Host "##vso[task.setvariable variable=AzDOPipelineId;isOutput=true]$AzureDevOpsBuildDefinitionId"
+ Write-Host "##vso[task.setvariable variable=AzDOBuildId;isOutput=true]$AzureDevOpsBuildId"
+ }
+ catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ exit 1
+ }
+ env:
+ MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
diff --git a/eng/common/templates/steps/promote-build.yml b/eng/common/templates/steps/add-build-to-channel.yml
similarity index 68%
rename from eng/common/templates/steps/promote-build.yml
rename to eng/common/templates/steps/add-build-to-channel.yml
index b90404435dd..f67a210d62f 100644
--- a/eng/common/templates/steps/promote-build.yml
+++ b/eng/common/templates/steps/add-build-to-channel.yml
@@ -5,9 +5,9 @@ steps:
- task: PowerShell@2
displayName: Add Build to Channel
inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/promote-build.ps1
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
arguments: -BuildId $(BARBuildId)
-ChannelId ${{ parameters.ChannelId }}
-MaestroApiAccessToken $(MaestroApiAccessToken)
-MaestroApiEndPoint $(MaestroApiEndPoint)
- -MaestroApiVersion $(MaestroApiVersion)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/steps/perf-send-to-helix.yml b/eng/common/templates/steps/perf-send-to-helix.yml
index b3ea9acf1f1..a468e92ce44 100644
--- a/eng/common/templates/steps/perf-send-to-helix.yml
+++ b/eng/common/templates/steps/perf-send-to-helix.yml
@@ -1,5 +1,6 @@
# Please remember to update the documentation if you make changes to these parameters!
parameters:
+ ProjectFile: '' # required -- project file that specifies the helix workitems
HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
@@ -10,7 +11,7 @@ parameters:
WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
- DotNetCliPackageType: '' # optional -- either 'sdk' or 'runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
+  DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines which of these will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control
WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
@@ -18,33 +19,18 @@ parameters:
DisplayNamePrefix: 'Send job to Helix' # optional -- rename the beginning of the displayName of the steps in AzDO
condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
+ osGroup: '' # required -- operating system for the job
+
steps:
- - powershell: $(Build.SourcesDirectory)\eng\common\msbuild.ps1 $(Build.SourcesDirectory)\eng\common\performance\perfhelixpublish.proj /restore /t:Test /bl:$(Build.SourcesDirectory)\artifacts\log\$env:BuildConfig\SendToHelix.binlog
- displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
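+# Delegate to a shared inner step template; osGroup selects the platform-specific behavior.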
+- template: /eng/pipelines/common/templates/runtimes/send-to-helix-inner-step.yml
+ parameters:
+ osGroup: ${{ parameters.osGroup }}
+ sendParams: $(Build.SourcesDirectory)/eng/common/performance/${{ parameters.ProjectFile }} /restore /t:Test /bl:$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/SendToHelix.binlog
+ displayName: ${{ parameters.DisplayNamePrefix }}
+ condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/performance/perfhelixpublish.proj /restore /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
- displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
- env:
+ environment:
BuildConfig: $(_BuildConfig)
HelixSource: ${{ parameters.HelixSource }}
HelixType: ${{ parameters.HelixType }}
@@ -62,5 +48,3 @@ steps:
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
Creator: ${{ parameters.Creator }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
diff --git a/eng/common/templates/steps/publish-logs.yml b/eng/common/templates/steps/publish-logs.yml
new file mode 100644
index 00000000000..88f238f36bf
--- /dev/null
+++ b/eng/common/templates/steps/publish-logs.yml
@@ -0,0 +1,23 @@
+parameters:
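+  # StageLabel/JobLabel group the collected binlogs under PostBuildLogs/<StageLabel>/<JobLabel>/.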
+ StageLabel: ''
+ JobLabel: ''
+
+steps:
+- task: Powershell@2
+ displayName: Prepare Binlogs to Upload
+ inputs:
+ targetType: inline
+ script: |
+ New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ continueOnError: true
+ condition: always()
+
+- task: PublishBuildArtifacts@1
+ displayName: Publish Logs
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
+ PublishLocation: Container
+ ArtifactName: PostBuildLogs
+ continueOnError: true
+ condition: always()
diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml
index 05df886f55f..bb5f1a92938 100644
--- a/eng/common/templates/steps/send-to-helix.yml
+++ b/eng/common/templates/steps/send-to-helix.yml
@@ -10,7 +10,7 @@ parameters:
HelixPostCommands: '' # optional -- commands to run after Helix work item execution
WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
- WorkItemTimeout: '' # optional -- a timeout in seconds for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
+  WorkItemTimeout: '' # optional -- a timeout for the work item command, as a TimeSpan.Parse-able value (e.g. 00:02:00); requires WorkItemDirectory; incompatible with XUnitProjects
CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
XUnitProjects: '' # optional -- semicolon delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
@@ -18,11 +18,12 @@ parameters:
XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
- DotNetCliPackageType: '' # optional -- either 'sdk' or 'runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
- DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
+  DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines which of these will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases-index.json
+ DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases-index.json
EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control
WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
+ HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting int)
Creator: '' # optional -- if the build is external, use this to specify who is sending the job
DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
@@ -55,6 +56,7 @@ steps:
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
Creator: ${{ parameters.Creator }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
@@ -85,6 +87,7 @@ steps:
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
Creator: ${{ parameters.Creator }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
diff --git a/eng/common/templates/steps/source-build.yml b/eng/common/templates/steps/source-build.yml
new file mode 100644
index 00000000000..8e336b7d16b
--- /dev/null
+++ b/eng/common/templates/steps/source-build.yml
@@ -0,0 +1,66 @@
+parameters:
+ # This template adds arcade-powered source-build to CI.
+
+ # This is a 'steps' template, and is intended for advanced scenarios where the existing build
+ # infra has a careful build methodology that must be followed. For example, a repo
+ # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
+ # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
+ # GitHub. Using this steps template leaves room for that infra to be included.
+
+ # Defines the platform on which to run the steps. See 'eng/common/templates/job/source-build.yml'
+ # for details. The entire object is described in the 'job' template for simplicity, even though
+ # the usage of the properties on this object is split between the 'job' and 'steps' templates.
+ platform: {}
+
+steps:
+# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
+- script: |
+ set -x
+ df -h
+
+ buildConfig=Release
+ # Check if AzDO substitutes in a build config from a variable, and use it if so.
+ if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
+ buildConfig='$(_BuildConfig)'
+ fi
+
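+  # Official (internal, non-PR) builds publish via pipelines and stamp an official build id.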
+ officialBuildArgs=
+ if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
+ officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
+ fi
+
+ targetRidArgs=
+ if [ '${{ parameters.platform.targetRID }}' != '' ]; then
+ targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
+ fi
+
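+  # Invoke the repo's build script (./build.sh unless the platform overrides it) with source-build properties.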
+ ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
+ --configuration $buildConfig \
+ --restore --build --pack --publish \
+ $officialBuildArgs \
+ $targetRidArgs \
+ /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
+ /p:ArcadeBuildFromSource=true
+ displayName: Build
+
+# Upload build logs for diagnosis.
+- task: CopyFiles@2
+ displayName: Prepare BuildLogs staging directory
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)'
+ Contents: |
+ **/*.log
+ **/*.binlog
+ artifacts/source-build/self/prebuilt-report/**
+ TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
+ CleanTargetFolder: true
+ continueOnError: true
+ condition: succeededOrFailed()
+
+- task: PublishPipelineArtifact@1
+ displayName: Publish BuildLogs
+ inputs:
+ targetPath: '$(Build.StagingDirectory)/BuildLogs'
+ artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
+ continueOnError: true
+ condition: succeededOrFailed()
diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
index f50507a06c1..9014e062514 100644
--- a/eng/common/tools.ps1
+++ b/eng/common/tools.ps1
@@ -5,11 +5,13 @@
[bool]$ci = if (Test-Path variable:ci) { $ci } else { $false }
# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names.
-[string]$configuration = if (Test-Path variable:configuration) { $configuration } else { "Debug" }
+[string]$configuration = if (Test-Path variable:configuration) { $configuration } else { 'Debug' }
+
+# Set to true to opt out of outputting binary log while running in CI
+[bool]$excludeCIBinarylog = if (Test-Path variable:excludeCIBinarylog) { $excludeCIBinarylog } else { $false }
# Set to true to output binary log from msbuild. Note that emitting binary log slows down the build.
-# Binary log must be enabled on CI.
-[bool]$binaryLog = if (Test-Path variable:binaryLog) { $binaryLog } else { $ci }
+[bool]$binaryLog = if (Test-Path variable:binaryLog) { $binaryLog } else { $ci -and !$excludeCIBinarylog }
# Set to true to use the pipelines logger which will enable Azure logging output.
# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md
@@ -24,7 +26,7 @@
[bool]$restore = if (Test-Path variable:restore) { $restore } else { $true }
# Adjusts msbuild verbosity level.
-[string]$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { "minimal" }
+[string]$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { 'minimal' }
# Set to true to reuse msbuild nodes. Recommended to not reuse on CI.
[bool]$nodeReuse = if (Test-Path variable:nodeReuse) { $nodeReuse } else { !$ci }
@@ -41,22 +43,31 @@
# Enable repos to use a particular version of the on-line dotnet-install scripts.
# default URL: https://dot.net/v1/dotnet-install.ps1
-[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { "v1" }
+[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' }
# True to use global NuGet cache instead of restoring packages to repository-local directory.
[bool]$useGlobalNuGetCache = if (Test-Path variable:useGlobalNuGetCache) { $useGlobalNuGetCache } else { !$ci }
# An array of names of processes to stop on script exit if prepareMachine is true.
-$processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @("msbuild", "dotnet", "vbcscompiler") }
+$processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @('msbuild', 'dotnet', 'vbcscompiler') }
+
+$disableConfigureToolsetImport = if (Test-Path variable:disableConfigureToolsetImport) { $disableConfigureToolsetImport } else { $null }
set-strictmode -version 2.0
-$ErrorActionPreference = "Stop"
+$ErrorActionPreference = 'Stop'
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
-function Create-Directory([string[]] $path) {
- if (!(Test-Path $path)) {
- New-Item -path $path -force -itemType "Directory" | Out-Null
- }
+# If specified, provides an alternate path for getting .NET Core SDKs and Runtimes. This script will still try public sources first.
+[string]$runtimeSourceFeed = if (Test-Path variable:runtimeSourceFeed) { $runtimeSourceFeed } else { $null }
+# Base-64 encoded SAS token that has permission to storage container described by $runtimeSourceFeed
+[string]$runtimeSourceFeedKey = if (Test-Path variable:runtimeSourceFeedKey) { $runtimeSourceFeedKey } else { $null }
+
+# If false, use the copy of dotnet-install from /eng/common/dotnet-install-scripts (which allows custom behaviors);
+# otherwise, fetch the script from its public location.
+[bool]$useDefaultDotnetInstall = if (Test-Path variable:useDefaultDotnetInstall) { $useDefaultDotnetInstall } else { $false }
+
+function Create-Directory ([string[]] $path) {
+ New-Item -Path $path -Force -ItemType 'Directory' | Out-Null
}
function Unzip([string]$zipfile, [string]$outpath) {
@@ -96,7 +107,10 @@ function Exec-Process([string]$command, [string]$commandArgs) {
}
}
-function InitializeDotNetCli([bool]$install) {
+# The createSdkLocationFile parameter enables generating a file under the toolset directory
+# into which the SDK's location is written. This is only necessary for cmd --> powershell
+# invocations, since dot sourcing isn't possible there.
+function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
if (Test-Path variable:global:_DotNetInstallDir) {
return $global:_DotNetInstallDir
}
@@ -134,13 +148,13 @@ function InitializeDotNetCli([bool]$install) {
if ((-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -ne $null) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) {
$dotnetRoot = $env:DOTNET_INSTALL_DIR
} else {
- $dotnetRoot = Join-Path $RepoRoot ".dotnet"
+ $dotnetRoot = Join-Path $RepoRoot '.dotnet'
if (-not (Test-Path(Join-Path $dotnetRoot "sdk\$dotnetSdkVersion"))) {
if ($install) {
InstallDotNetSdk $dotnetRoot $dotnetSdkVersion
} else {
- Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Unable to find dotnet with SDK version '$dotnetSdkVersion'"
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to find dotnet with SDK version '$dotnetSdkVersion'"
ExitWithExitCode 1
}
}
@@ -148,6 +162,24 @@ function InitializeDotNetCli([bool]$install) {
$env:DOTNET_INSTALL_DIR = $dotnetRoot
}
+  # Creates a temporary file under the toolset dir.
+  # The following code block protects against concurrent access so that this function can
+  # be called in parallel.
+ if ($createSdkLocationFile) {
+ do {
+ $sdkCacheFileTemp = Join-Path $ToolsetDir $([System.IO.Path]::GetRandomFileName())
+ }
+ until (!(Test-Path $sdkCacheFileTemp))
+ Set-Content -Path $sdkCacheFileTemp -Value $dotnetRoot
+
+ try {
+ Rename-Item -Force -Path $sdkCacheFileTemp 'sdk.txt'
+ } catch {
+      # Another process beat us to it; remove our temporary file.
+ Remove-Item -Path $sdkCacheFileTemp
+ }
+ }
+
# Add dotnet to PATH. This prevents any bare invocation of dotnet in custom
# build steps from using anything other than what we've downloaded.
# It also ensures that VS msbuild will use the downloaded sdk targets.
@@ -156,15 +188,6 @@ function InitializeDotNetCli([bool]$install) {
# Make Sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build
Write-PipelinePrependPath -Path $dotnetRoot
- # Work around issues with Azure Artifacts credential provider
- # https://github.com/dotnet/arcade/issues/3932
- if ($ci) {
- $env:NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS = 20
- $env:NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS = 20
- Write-PipelineSetVariable -Name 'NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS' -Value '20'
- Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20'
- }
-
Write-PipelineSetVariable -Name 'DOTNET_MULTILEVEL_LOOKUP' -Value '0'
Write-PipelineSetVariable -Name 'DOTNET_SKIP_FIRST_TIME_EXPERIENCE' -Value '1'
@@ -172,27 +195,63 @@ function InitializeDotNetCli([bool]$install) {
}
function GetDotNetInstallScript([string] $dotnetRoot) {
- $installScript = Join-Path $dotnetRoot "dotnet-install.ps1"
+ $installScript = Join-Path $dotnetRoot 'dotnet-install.ps1'
if (!(Test-Path $installScript)) {
- Create-Directory $dotnetRoot
- $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
- Invoke-WebRequest "https://dot.net/$dotnetInstallScriptVersion/dotnet-install.ps1" -OutFile $installScript
+    Create-Directory $dotnetRoot
+
+    if ($useDefaultDotnetInstall)
+    {
+      $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
+
+      $maxRetries = 5
+      $retries = 1
+
+      $uri = "https://dot.net/$dotnetInstallScriptVersion/dotnet-install.ps1"
+
+      while($true) {
+        try {
+          Write-Host "GET $uri"
+          Invoke-WebRequest $uri -OutFile $installScript
+          break
+        }
+        catch {
+          Write-Host "Failed to download '$uri'"
+          Write-Error $_.Exception.Message -ErrorAction Continue
+        }
+
+        if (++$retries -le $maxRetries) {
+          $delayInSeconds = [math]::Pow(2, $retries) - 1 # Exponential backoff
+          Write-Host "Retrying. Waiting for $delayInSeconds seconds before next attempt ($retries of $maxRetries)."
+          Start-Sleep -Seconds $delayInSeconds
+        }
+        else {
+          throw "Unable to download file in $maxRetries attempts."
+        }
+      }
+ }
+ else
+ {
+ # Use a special version of the script from eng/common that understands the existence of a "productVersion.txt" in a dotnet path.
+ # See https://github.com/dotnet/arcade/issues/6047 for details
+ $engCommonCopy = Resolve-Path (Join-Path $PSScriptRoot 'dotnet-install-scripts\dotnet-install.ps1')
+ Copy-Item $engCommonCopy -Destination $installScript -Force
+ }
}
-
return $installScript
}
-function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = "") {
- InstallDotNet $dotnetRoot $version $architecture
+function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = '', [switch] $noPath) {
+ InstallDotNet $dotnetRoot $version $architecture '' $false $runtimeSourceFeed $runtimeSourceFeedKey -noPath:$noPath
}
-function InstallDotNet([string] $dotnetRoot,
- [string] $version,
- [string] $architecture = "",
- [string] $runtime = "",
- [bool] $skipNonVersionedFiles = $false,
- [string] $runtimeSourceFeed = "",
- [string] $runtimeSourceFeedKey = "") {
+function InstallDotNet([string] $dotnetRoot,
+ [string] $version,
+ [string] $architecture = '',
+ [string] $runtime = '',
+ [bool] $skipNonVersionedFiles = $false,
+ [string] $runtimeSourceFeed = '',
+ [string] $runtimeSourceFeedKey = '',
+ [switch] $noPath) {
$installScript = GetDotNetInstallScript $dotnetRoot
$installParameters = @{
@@ -203,15 +262,14 @@ function InstallDotNet([string] $dotnetRoot,
if ($architecture) { $installParameters.Architecture = $architecture }
if ($runtime) { $installParameters.Runtime = $runtime }
if ($skipNonVersionedFiles) { $installParameters.SkipNonVersionedFiles = $skipNonVersionedFiles }
+ if ($noPath) { $installParameters.NoPath = $True }
try {
& $installScript @installParameters
}
catch {
- Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Failed to install dotnet runtime '$runtime' from public location."
-
- # Only the runtime can be installed from a custom [private] location.
- if ($runtime -and ($runtimeSourceFeed -or $runtimeSourceFeedKey)) {
+ if ($runtimeSourceFeed -or $runtimeSourceFeedKey) {
+ Write-Host "Failed to install dotnet from public location. Trying from '$runtimeSourceFeed'"
if ($runtimeSourceFeed) { $installParameters.AzureFeed = $runtimeSourceFeed }
if ($runtimeSourceFeedKey) {
@@ -224,10 +282,11 @@ function InstallDotNet([string] $dotnetRoot,
& $installScript @installParameters
}
catch {
- Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Failed to install dotnet runtime '$runtime' from custom location '$runtimeSourceFeed'."
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install dotnet from custom location '$runtimeSourceFeed'."
ExitWithExitCode 1
}
} else {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install dotnet from public location."
ExitWithExitCode 1
}
}
@@ -253,17 +312,26 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
return $global:_MSBuildExe
}
+ # Minimum VS version to require.
+ $vsMinVersionReqdStr = '16.8'
+ $vsMinVersionReqd = [Version]::new($vsMinVersionReqdStr)
+
+ # If the version of msbuild is going to be xcopied,
+ # use this version. Version matches a package here:
+ # https://dev.azure.com/dnceng/public/_packaging?_a=package&feed=dotnet-eng&package=RoslynTools.MSBuild&protocolType=NuGet&version=16.8.0-preview3&view=overview
+ $defaultXCopyMSBuildVersion = '16.8.0-preview3'
+
if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
- $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { "15.9" }
+ $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { $vsMinVersionReqdStr }
$vsMinVersion = [Version]::new($vsMinVersionStr)
# Try msbuild command available in the environment.
if ($env:VSINSTALLDIR -ne $null) {
- $msbuildCmd = Get-Command "msbuild.exe" -ErrorAction SilentlyContinue
+ $msbuildCmd = Get-Command 'msbuild.exe' -ErrorAction SilentlyContinue
if ($msbuildCmd -ne $null) {
# Workaround for https://github.com/dotnet/roslyn/issues/35793
# Due to this issue $msbuildCmd.Version returns 0.0.0.0 for msbuild.exe 16.2+
- $msbuildVersion = [Version]::new((Get-Item $msbuildCmd.Path).VersionInfo.ProductVersion.Split(@('-', '+'))[0])
+ $msbuildVersion = [Version]::new((Get-Item $msbuildCmd.Path).VersionInfo.ProductVersion.Split([char[]]@('-', '+'))[0])
if ($msbuildVersion -ge $vsMinVersion) {
return $global:_MSBuildExe = $msbuildCmd.Path
@@ -283,17 +351,35 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
InitializeVisualStudioEnvironmentVariables $vsInstallDir $vsMajorVersion
} else {
- if (Get-Member -InputObject $GlobalJson.tools -Name "xcopy-msbuild") {
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'xcopy-msbuild') {
$xcopyMSBuildVersion = $GlobalJson.tools.'xcopy-msbuild'
$vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0]
} else {
- $vsMajorVersion = $vsMinVersion.Major
- $xcopyMSBuildVersion = "$vsMajorVersion.$($vsMinVersion.Minor).0-alpha"
+      # If the VS version provided in global.json is incompatible (too low), fall back to the default xcopy-msbuild version.
+ if($vsMinVersion -lt $vsMinVersionReqd){
+ Write-Host "Using xcopy-msbuild version of $defaultXCopyMSBuildVersion since VS version $vsMinVersionStr provided in global.json is not compatible"
+ $xcopyMSBuildVersion = $defaultXCopyMSBuildVersion
+ }
+ else{
+ # If the VS version IS compatible, look for an xcopy msbuild package
+ # with a version matching VS.
+ # Note: If this version does not exist, then an explicit version of xcopy msbuild
+ # can be specified in global.json. This will be required for pre-release versions of msbuild.
+ $vsMajorVersion = $vsMinVersion.Major
+ $vsMinorVersion = $vsMinVersion.Minor
+ $xcopyMSBuildVersion = "$vsMajorVersion.$vsMinorVersion.0"
+ }
}
- $vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install
+ $vsInstallDir = $null
+ if ($xcopyMSBuildVersion.Trim() -ine "none") {
+ $vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install
+ if ($vsInstallDir -eq $null) {
+ throw "Could not xcopy msbuild. Please check that package 'RoslynTools.MSBuild @ $xcopyMSBuildVersion' exists on feed 'dotnet-eng'."
+ }
+ }
if ($vsInstallDir -eq $null) {
- throw "Unable to find Visual Studio that has required version and components installed"
+ throw 'Unable to find Visual Studio that has required version and components installed'
}
}
@@ -317,7 +403,7 @@ function InstallXCopyMSBuild([string]$packageVersion) {
}
function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
- $packageName = "RoslynTools.MSBuild"
+ $packageName = 'RoslynTools.MSBuild'
$packageDir = Join-Path $ToolsDir "msbuild\$packageVersion"
$packagePath = Join-Path $packageDir "$packageName.$packageVersion.nupkg"
@@ -333,7 +419,7 @@ function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
Unzip $packagePath $packageDir
}
- return Join-Path $packageDir "tools"
+ return Join-Path $packageDir 'tools'
}
#
@@ -357,29 +443,34 @@ function LocateVisualStudio([object]$vsRequirements = $null){
if (Get-Member -InputObject $GlobalJson.tools -Name 'vswhere') {
$vswhereVersion = $GlobalJson.tools.vswhere
} else {
- $vswhereVersion = "2.5.2"
+ $vswhereVersion = '2.5.2'
}
$vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion"
- $vsWhereExe = Join-Path $vsWhereDir "vswhere.exe"
+ $vsWhereExe = Join-Path $vsWhereDir 'vswhere.exe'
if (!(Test-Path $vsWhereExe)) {
Create-Directory $vsWhereDir
- Write-Host "Downloading vswhere"
- Invoke-WebRequest "https://github.com/Microsoft/vswhere/releases/download/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
+ Write-Host 'Downloading vswhere'
+ try {
+ Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
+ }
+ catch {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
+ }
}
if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
- $args = @("-latest", "-prerelease", "-format", "json", "-requires", "Microsoft.Component.MSBuild", "-products", "*")
+ $args = @('-latest', '-prerelease', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*')
- if (Get-Member -InputObject $vsRequirements -Name "version") {
- $args += "-version"
+ if (Get-Member -InputObject $vsRequirements -Name 'version') {
+ $args += '-version'
$args += $vsRequirements.version
}
- if (Get-Member -InputObject $vsRequirements -Name "components") {
+ if (Get-Member -InputObject $vsRequirements -Name 'components') {
foreach ($component in $vsRequirements.components) {
- $args += "-requires"
+ $args += '-requires'
$args += $component
}
}
@@ -405,13 +496,13 @@ function InitializeBuildTool() {
# Initialize dotnet cli if listed in 'tools'
$dotnetRoot = $null
- if (Get-Member -InputObject $GlobalJson.tools -Name "dotnet") {
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') {
$dotnetRoot = InitializeDotNetCli -install:$restore
}
- if ($msbuildEngine -eq "dotnet") {
+ if ($msbuildEngine -eq 'dotnet') {
if (!$dotnetRoot) {
- Write-PipelineTelemetryError -Category "InitializeToolset" -Message "/global.json must specify 'tools.dotnet'."
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "/global.json must specify 'tools.dotnet'."
ExitWithExitCode 1
}
$dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet')
@@ -420,13 +511,13 @@ function InitializeBuildTool() {
try {
$msbuildPath = InitializeVisualStudioMSBuild -install:$restore
} catch {
- Write-PipelineTelemetryError -Category "InitializeToolset" -Message $_
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
ExitWithExitCode 1
}
$buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472" }
} else {
- Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'."
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'."
ExitWithExitCode 1
}
@@ -435,26 +526,29 @@ function InitializeBuildTool() {
function GetDefaultMSBuildEngine() {
# Presence of tools.vs indicates the repo needs to build using VS msbuild on Windows.
- if (Get-Member -InputObject $GlobalJson.tools -Name "vs") {
- return "vs"
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {
+ return 'vs'
}
- if (Get-Member -InputObject $GlobalJson.tools -Name "dotnet") {
- return "dotnet"
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') {
+ return 'dotnet'
}
- Write-PipelineTelemetryError -Category "InitializeToolset" -Message "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'."
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'."
ExitWithExitCode 1
}
function GetNuGetPackageCachePath() {
if ($env:NUGET_PACKAGES -eq $null) {
- # Use local cache on CI to ensure deterministic build,
+ # Use local cache on CI to ensure deterministic build.
+ # Avoid using the http cache as workaround for https://github.com/NuGet/Home/issues/3116
# use global cache in dev builds to avoid cost of downloading packages.
+ # For directory normalization, see also: https://github.com/NuGet/Home/issues/7968
if ($useGlobalNuGetCache) {
- $env:NUGET_PACKAGES = Join-Path $env:UserProfile ".nuget\packages"
+ $env:NUGET_PACKAGES = Join-Path $env:UserProfile '.nuget\packages\'
} else {
- $env:NUGET_PACKAGES = Join-Path $RepoRoot ".packages"
+ $env:NUGET_PACKAGES = Join-Path $RepoRoot '.packages\'
+ $env:RESTORENOCACHE = $true
}
}
@@ -467,7 +561,7 @@ function GetSdkTaskProject([string]$taskName) {
}
function InitializeNativeTools() {
- if (Get-Member -InputObject $GlobalJson -Name "native-tools") {
+ if (-Not (Test-Path variable:DisableNativeToolsetInstalls) -And (Get-Member -InputObject $GlobalJson -Name "native-tools")) {
$nativeArgs= @{}
if ($ci) {
$nativeArgs = @{
@@ -496,20 +590,20 @@ function InitializeToolset() {
}
if (-not $restore) {
- Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Toolset version $toolsetVersion has not been restored."
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Toolset version $toolsetVersion has not been restored."
ExitWithExitCode 1
}
$buildTool = InitializeBuildTool
- $proj = Join-Path $ToolsetDir "restore.proj"
- $bl = if ($binaryLog) { "/bl:" + (Join-Path $LogDir "ToolsetRestore.binlog") } else { "" }
+ $proj = Join-Path $ToolsetDir 'restore.proj'
+ $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'ToolsetRestore.binlog') } else { '' }
'' | Set-Content $proj
MSBuild-Core $proj $bl /t:__WriteToolsetLocation /clp:ErrorsOnly`;NoSummary /p:__ToolsetLocationOutputFile=$toolsetLocationFile
- $path = Get-Content $toolsetLocationFile -TotalCount 1
+ $path = Get-Content $toolsetLocationFile -Encoding UTF8 -TotalCount 1
if (!(Test-Path $path)) {
throw "Invalid toolset path: $path"
}
@@ -525,7 +619,7 @@ function ExitWithExitCode([int] $exitCode) {
}
function Stop-Processes() {
- Write-Host "Killing running build processes..."
+ Write-Host 'Killing running build processes...'
foreach ($processName in $processesToStopOnExit) {
Get-Process -Name $processName -ErrorAction SilentlyContinue | Stop-Process
}
@@ -540,15 +634,16 @@ function MSBuild() {
if ($pipelinesLog) {
$buildTool = InitializeBuildTool
- # Work around issues with Azure Artifacts credential provider
- # https://github.com/dotnet/arcade/issues/3932
- if ($ci -and $buildTool.Tool -eq "dotnet") {
- dotnet nuget locals http-cache -c
+ if ($ci -and $buildTool.Tool -eq 'dotnet') {
+ $env:NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS = 20
+ $env:NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS = 20
+ Write-PipelineSetVariable -Name 'NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS' -Value '20'
+ Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20'
}
$toolsetBuildProject = InitializeToolset
$path = Split-Path -parent $toolsetBuildProject
- $path = Join-Path $path (Join-Path $buildTool.Framework "Microsoft.DotNet.Arcade.Sdk.dll")
+ $path = Join-Path $path (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')
$args += "/logger:$path"
}
@@ -562,13 +657,13 @@ function MSBuild() {
#
function MSBuild-Core() {
if ($ci) {
- if (!$binaryLog) {
- Write-PipelineTaskError -Message "Binary log must be enabled in CI build."
+ if (!$binaryLog -and !$excludeCIBinarylog) {
+ Write-PipelineTelemetryError -Category 'Build' -Message 'Binary log must be enabled in CI build, or explicitly opted-out from with the -excludeCIBinarylog switch.'
ExitWithExitCode 1
}
if ($nodeReuse) {
- Write-PipelineTaskError -Message "Node reuse must be disabled in CI build."
+ Write-PipelineTelemetryError -Category 'Build' -Message 'Node reuse must be disabled in CI build.'
ExitWithExitCode 1
}
}
@@ -578,10 +673,10 @@ function MSBuild-Core() {
$cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci"
if ($warnAsError) {
- $cmdArgs += " /warnaserror /p:TreatWarningsAsErrors=true"
+ $cmdArgs += ' /warnaserror /p:TreatWarningsAsErrors=true'
}
else {
- $cmdArgs += " /p:TreatWarningsAsErrors=false"
+ $cmdArgs += ' /p:TreatWarningsAsErrors=false'
}
foreach ($arg in $args) {
@@ -590,10 +685,12 @@ function MSBuild-Core() {
}
}
+ $env:ARCADE_BUILD_TOOL_COMMAND = "$($buildTool.Path) $cmdArgs"
+
$exitCode = Exec-Process $buildTool.Path $cmdArgs
if ($exitCode -ne 0) {
- Write-PipelineTaskError -Message "Build failed."
+ Write-PipelineTelemetryError -Category 'Build' -Message 'Build failed.'
$buildLog = GetMSBuildBinaryLogCommandLineArgument $args
if ($buildLog -ne $null) {
@@ -608,12 +705,12 @@ function GetMSBuildBinaryLogCommandLineArgument($arguments) {
foreach ($argument in $arguments) {
if ($argument -ne $null) {
$arg = $argument.Trim()
- if ($arg.StartsWith("/bl:", "OrdinalIgnoreCase")) {
- return $arg.Substring("/bl:".Length)
+ if ($arg.StartsWith('/bl:', "OrdinalIgnoreCase")) {
+ return $arg.Substring('/bl:'.Length)
}
- if ($arg.StartsWith("/binaryLogger:", "OrdinalIgnoreCase")) {
- return $arg.Substring("/binaryLogger:".Length)
+ if ($arg.StartsWith('/binaryLogger:', 'OrdinalIgnoreCase')) {
+ return $arg.Substring('/binaryLogger:'.Length)
}
}
}
@@ -634,16 +731,26 @@ function IsWindowsPlatform() {
return [environment]::OSVersion.Platform -eq [PlatformID]::Win32NT
}
+function Get-Darc($version) {
+ $darcPath = "$TempDir\darc\$(New-Guid)"
+ if ($version -ne $null) {
+ & $PSScriptRoot\darc-init.ps1 -toolpath $darcPath -darcVersion $version | Out-Host
+ } else {
+ & $PSScriptRoot\darc-init.ps1 -toolpath $darcPath | Out-Host
+ }
+ return "$darcPath\darc.exe"
+}
+
. $PSScriptRoot\pipeline-logging-functions.ps1
-$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot "..\..")
-$EngRoot = Resolve-Path (Join-Path $PSScriptRoot "..")
-$ArtifactsDir = Join-Path $RepoRoot "artifacts"
-$ToolsetDir = Join-Path $ArtifactsDir "toolset"
-$ToolsDir = Join-Path $RepoRoot ".tools"
-$LogDir = Join-Path (Join-Path $ArtifactsDir "log") $configuration
-$TempDir = Join-Path (Join-Path $ArtifactsDir "tmp") $configuration
-$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot "global.json") | ConvertFrom-Json
+$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..')
+$EngRoot = Resolve-Path (Join-Path $PSScriptRoot '..')
+$ArtifactsDir = Join-Path $RepoRoot 'artifacts'
+$ToolsetDir = Join-Path $ArtifactsDir 'toolset'
+$ToolsDir = Join-Path $RepoRoot '.tools'
+$LogDir = Join-Path (Join-Path $ArtifactsDir 'log') $configuration
+$TempDir = Join-Path (Join-Path $ArtifactsDir 'tmp') $configuration
+$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot 'global.json') | ConvertFrom-Json
# true if global.json contains a "runtimes" section
$globalJsonHasRuntimes = if ($GlobalJson.tools.PSObject.Properties.Name -Match 'runtimes') { $true } else { $false }
@@ -656,3 +763,18 @@ Write-PipelineSetVariable -Name 'Artifacts.Toolset' -Value $ToolsetDir
Write-PipelineSetVariable -Name 'Artifacts.Log' -Value $LogDir
Write-PipelineSetVariable -Name 'TEMP' -Value $TempDir
Write-PipelineSetVariable -Name 'TMP' -Value $TempDir
+
+# Import custom tools configuration, if present in the repo.
+# Note: Import in global scope so that the script can set top-level variables without qualification.
+if (!$disableConfigureToolsetImport) {
+ $configureToolsetScript = Join-Path $EngRoot 'configure-toolset.ps1'
+ if (Test-Path $configureToolsetScript) {
+ . $configureToolsetScript
+ if ((Test-Path variable:failOnConfigureToolsetError) -And $failOnConfigureToolsetError) {
+ if ((Test-Path variable:LastExitCode) -And ($LastExitCode -ne 0)) {
+ Write-PipelineTelemetryError -Category 'Build' -Message 'configure-toolset.ps1 returned a non-zero exit code'
+ ExitWithExitCode $LastExitCode
+ }
+ }
+ }
+}
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
index acbb0c5b3ff..b5d63cb1b7c 100755
--- a/eng/common/tools.sh
+++ b/eng/common/tools.sh
@@ -18,9 +18,17 @@ fi
# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names.
configuration=${configuration:-'Debug'}
+# Set to true to opt out of outputting binary log while running in CI
+exclude_ci_binary_log=${exclude_ci_binary_log:-false}
+
+if [[ "$ci" == true && "$exclude_ci_binary_log" == false ]]; then
+ binary_log_default=true
+else
+ binary_log_default=false
+fi
+
# Set to true to output binary log from msbuild. Note that emitting binary log slows down the build.
-# Binary log must be enabled on CI.
-binary_log=${binary_log:-$ci}
+binary_log=${binary_log:-$binary_log_default}
# Turns on machine preparation/clean up code that changes the machine state (e.g. kills build processes).
prepare_machine=${prepare_machine:-false}
@@ -41,7 +49,7 @@ fi
# Configures warning treatment in msbuild.
warn_as_error=${warn_as_error:-true}
-# True to attempt using .NET Core already that meets requirements specified in global.json
+# True to attempt using .NET Core already that meets requirements specified in global.json
# installed on the machine instead of downloading one.
use_installed_dotnet_cli=${use_installed_dotnet_cli:-true}
@@ -56,6 +64,14 @@ else
use_global_nuget_cache=${use_global_nuget_cache:-true}
fi
+# Used when restoring .NET SDK from alternative feeds
+runtime_source_feed=${runtime_source_feed:-''}
+runtime_source_feed_key=${runtime_source_feed_key:-''}
+
+# Determines if dotnet-install.sh comes from the eng/common folder or the internet
+# (default = public version)
+use_default_dotnet_install=${use_default_dotnet_install:-false}
+
# Resolve any symlinks in the given path.
function ResolvePath {
local path=$1
@@ -77,11 +93,11 @@ function ResolvePath {
function ReadGlobalVersion {
local key=$1
- local line=`grep -m 1 "$key" "$global_json_file"`
+ local line=$(awk "/$key/ {print; exit}" "$global_json_file")
local pattern="\"$key\" *: *\"(.*)\""
if [[ ! $line =~ $pattern ]]; then
- Write-PipelineTelemetryError -category 'InitializeToolset' "Error: Cannot find \"$key\" in $global_json_file"
+ Write-PipelineTelemetryError -category 'Build' "Error: Cannot find \"$key\" in $global_json_file"
ExitWithExitCode 1
fi
@@ -152,15 +168,6 @@ function InitializeDotNetCli {
# build steps from using anything other than what we've downloaded.
Write-PipelinePrependPath -path "$dotnet_root"
- # Work around issues with Azure Artifacts credential provider
- # https://github.com/dotnet/arcade/issues/3932
- if [[ "$ci" == true ]]; then
- export NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS=20
- export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20
- Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20"
- Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20"
- fi
-
Write-PipelineSetVariable -name "DOTNET_MULTILEVEL_LOOKUP" -value "0"
Write-PipelineSetVariable -name "DOTNET_SKIP_FIRST_TIME_EXPERIENCE" -value "1"
@@ -171,69 +178,91 @@ function InitializeDotNetCli {
function InstallDotNetSdk {
local root=$1
local version=$2
- local architecture=""
- if [[ $# == 3 ]]; then
+ local architecture="unset"
+ if [[ $# -ge 3 ]]; then
architecture=$3
fi
- InstallDotNet "$root" "$version" $architecture
+ InstallDotNet "$root" "$version" $architecture 'sdk' 'false' $runtime_source_feed $runtime_source_feed_key
}
function InstallDotNet {
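+  # Arguments are positional; callers pass sentinel values for slots they do not use:
+  #   $1 install root   $2 version   $3 architecture (or 'unset')   $4 runtime (or 'sdk')
+  #   $5 skip-non-versioned-files flag (or 'false')   $6 runtime source feed   $7 base64-encoded feed credential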
local root=$1
local version=$2
-
+
GetDotNetInstallScript "$root"
local install_script=$_GetDotNetInstallScript
local archArg=''
- if [[ -n "${3:-}" ]]; then
+ if [[ -n "${3:-}" ]] && [ "$3" != 'unset' ]; then
archArg="--architecture $3"
fi
local runtimeArg=''
- if [[ -n "${4:-}" ]]; then
+ if [[ -n "${4:-}" ]] && [ "$4" != 'sdk' ]; then
runtimeArg="--runtime $4"
fi
-
local skipNonVersionedFilesArg=""
- if [[ "$#" -ge "5" ]]; then
+ if [[ "$#" -ge "5" ]] && [[ "$5" != 'false' ]]; then
skipNonVersionedFilesArg="--skip-non-versioned-files"
fi
bash "$install_script" --version $version --install-dir "$root" $archArg $runtimeArg $skipNonVersionedFilesArg || {
local exit_code=$?
- Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install dotnet SDK from public location (exit code '$exit_code')."
+ echo "Failed to install dotnet SDK from public location (exit code '$exit_code')."
- if [[ -n "$runtimeArg" ]]; then
- local runtimeSourceFeed=''
- if [[ -n "${6:-}" ]]; then
- runtimeSourceFeed="--azure-feed $6"
- fi
+ local runtimeSourceFeed=''
+ if [[ -n "${6:-}" ]]; then
+ runtimeSourceFeed="--azure-feed $6"
+ fi
- local runtimeSourceFeedKey=''
- if [[ -n "${7:-}" ]]; then
- # The 'base64' binary on alpine uses '-d' and doesn't support '--decode'
- # '-d'. To work around this, do a simple detection and switch the parameter
- # accordingly.
- decodeArg="--decode"
- if base64 --help 2>&1 | grep -q "BusyBox"; then
- decodeArg="-d"
- fi
- decodedFeedKey=`echo $7 | base64 $decodeArg`
- runtimeSourceFeedKey="--feed-credential $decodedFeedKey"
+ local runtimeSourceFeedKey=''
+ if [[ -n "${7:-}" ]]; then
+      # The 'base64' binary on Alpine (BusyBox) only supports '-d', not '--decode'.
+      # To work around this, do a simple detection and switch the parameter accordingly.
+ decodeArg="--decode"
+ if base64 --help 2>&1 | grep -q "BusyBox"; then
+ decodeArg="-d"
fi
+ decodedFeedKey=`echo $7 | base64 $decodeArg`
+ runtimeSourceFeedKey="--feed-credential $decodedFeedKey"
+ fi
- if [[ -n "$runtimeSourceFeed" || -n "$runtimeSourceFeedKey" ]]; then
- bash "$install_script" --version $version --install-dir "$root" $archArg $runtimeArg $skipNonVersionedFilesArg $runtimeSourceFeed $runtimeSourceFeedKey || {
- local exit_code=$?
- Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install dotnet SDK from custom location '$runtimeSourceFeed' (exit code '$exit_code')."
- ExitWithExitCode $exit_code
- }
- else
+ if [[ -n "$runtimeSourceFeed" || -n "$runtimeSourceFeedKey" ]]; then
+ bash "$install_script" --version $version --install-dir "$root" $archArg $runtimeArg $skipNonVersionedFilesArg $runtimeSourceFeed $runtimeSourceFeedKey || {
+ local exit_code=$?
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install dotnet SDK from custom location '$runtimeSourceFeed' (exit code '$exit_code')."
ExitWithExitCode $exit_code
+ }
+ else
+ if [[ $exit_code != 0 ]]; then
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install dotnet SDK from public location (exit code '$exit_code')."
fi
+ ExitWithExitCode $exit_code
fi
}
}
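+# Runs the given command up to 5 times with exponential backoff between attempts;
+# used below when downloading dotnet-install.sh.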
+function with_retries {
+ local maxRetries=5
+ local retries=1
+ echo "Trying to run '$@' for maximum of $maxRetries attempts."
+ while [[ $((retries++)) -le $maxRetries ]]; do
+ "$@"
+
+ if [[ $? == 0 ]]; then
+ echo "Ran '$@' successfully."
+ return 0
+ fi
+
+ timeout=$((2**$retries-1))
+ echo "Failed to execute '$@'. Waiting $timeout seconds before next attempt ($retries out of $maxRetries)." 1>&2
+ sleep $timeout
+ done
+
+ echo "Failed to execute '$@' for $maxRetries times." 1>&2
+
+ return 1
+}
+
function GetDotNetInstallScript {
local root=$1
local install_script="$root/dotnet-install.sh"
@@ -242,23 +271,30 @@ function GetDotNetInstallScript {
if [[ ! -a "$install_script" ]]; then
mkdir -p "$root"
- echo "Downloading '$install_script_url'"
+ if [[ "$use_default_dotnet_install" == true ]]; then
+ echo "Downloading '$install_script_url'"
- # Use curl if available, otherwise use wget
- if command -v curl > /dev/null; then
- curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script" || {
- local exit_code=$?
- Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')."
- ExitWithExitCode $exit_code
- }
- else
- wget -q -O "$install_script" "$install_script_url" || {
- local exit_code=$?
- Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')."
- ExitWithExitCode $exit_code
- }
+ # Use curl if available, otherwise use wget
+ if command -v curl > /dev/null; then
+ with_retries curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script" || {
+ local exit_code=$?
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')."
+ ExitWithExitCode $exit_code
+ }
+ else
+ with_retries wget -v -O "$install_script" "$install_script_url" || {
+ local exit_code=$?
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')."
+ ExitWithExitCode $exit_code
+ }
+ fi
+ else
+ # Use a special version of the script from eng/common that understands the existence of a "productVersion.txt" in a dotnet path.
+ # See https://github.com/dotnet/arcade/issues/6047 for details
+ cp $repo_root/eng/common/dotnet-install-scripts/dotnet-install.sh $install_script
fi
fi
+
# return value
_GetDotNetInstallScript="$install_script"
}
@@ -267,21 +303,23 @@ function InitializeBuildTool {
if [[ -n "${_InitializeBuildTool:-}" ]]; then
return
fi
-
+
InitializeDotNetCli $restore
# return values
- _InitializeBuildTool="$_InitializeDotNetCli/dotnet"
+ _InitializeBuildTool="$_InitializeDotNetCli/dotnet"
_InitializeBuildToolCommand="msbuild"
_InitializeBuildToolFramework="netcoreapp2.1"
}
+# Set RestoreNoCache as a workaround for https://github.com/NuGet/Home/issues/3116
function GetNuGetPackageCachePath {
if [[ -z ${NUGET_PACKAGES:-} ]]; then
if [[ "$use_global_nuget_cache" == true ]]; then
export NUGET_PACKAGES="$HOME/.nuget/packages"
else
export NUGET_PACKAGES="$repo_root/.packages"
+ export RESTORENOCACHE=true
fi
fi
@@ -290,6 +328,9 @@ function GetNuGetPackageCachePath {
}
function InitializeNativeTools() {
+ if [[ -n "${DisableNativeToolsetInstalls:-}" ]]; then
+ return
+ fi
if grep -Fq "native-tools" $global_json_file
then
local nativeArgs=""
@@ -332,14 +373,14 @@ function InitializeToolset {
if [[ "$binary_log" == true ]]; then
bl="/bl:$log_dir/ToolsetRestore.binlog"
fi
-
+
echo '' > "$proj"
MSBuild-Core "$proj" $bl /t:__WriteToolsetLocation /clp:ErrorsOnly\;NoSummary /p:__ToolsetLocationOutputFile="$toolset_location_file"
local toolset_build_proj=`cat "$toolset_location_file"`
if [[ ! -a "$toolset_build_proj" ]]; then
- Write-PipelineTelemetryError -category 'InitializeToolset' "Invalid toolset path: $toolset_build_proj"
+ Write-PipelineTelemetryError -category 'Build' "Invalid toolset path: $toolset_build_proj"
ExitWithExitCode 3
fi
@@ -367,10 +408,11 @@ function MSBuild {
InitializeBuildTool
InitializeToolset
- # Work around issues with Azure Artifacts credential provider
- # https://github.com/dotnet/arcade/issues/3932
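+  # Work around issues with the Azure Artifacts credential provider
+  # https://github.com/dotnet/arcade/issues/3932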
if [[ "$ci" == true ]]; then
- dotnet nuget locals http-cache -c
+ export NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS=20
+ export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20
+ Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20"
+ Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20"
fi
local toolset_dir="${_InitializeToolset%/*}"
@@ -383,13 +425,13 @@ function MSBuild {
function MSBuild-Core {
if [[ "$ci" == true ]]; then
- if [[ "$binary_log" != true ]]; then
- Write-PipelineTaskError "Binary log must be enabled in CI build."
+ if [[ "$binary_log" != true && "$exclude_ci_binary_log" != true ]]; then
+ Write-PipelineTelemetryError -category 'Build' "Binary log must be enabled in CI build, or explicitly opted-out from with the -noBinaryLog switch."
ExitWithExitCode 1
fi
if [[ "$node_reuse" == true ]]; then
- Write-PipelineTaskError "Node reuse must be disabled in CI build."
+ Write-PipelineTelemetryError -category 'Build' "Node reuse must be disabled in CI build."
ExitWithExitCode 1
fi
fi
@@ -401,11 +443,17 @@ function MSBuild-Core {
warnaserror_switch="/warnaserror"
fi
- "$_InitializeBuildTool" "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@" || {
- local exit_code=$?
- Write-PipelineTaskError "Build failed (exit code '$exit_code')."
- ExitWithExitCode $exit_code
+ function RunBuildTool {
+ export ARCADE_BUILD_TOOL_COMMAND="$_InitializeBuildTool $@"
+
+ "$_InitializeBuildTool" "$@" || {
+ local exit_code=$?
+ Write-PipelineTaskError "Build failed (exit code '$exit_code')."
+ ExitWithExitCode $exit_code
+ }
}
+
+ RunBuildTool "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@"
}
ResolvePath "${BASH_SOURCE[0]}"
@@ -424,7 +472,7 @@ temp_dir="$artifacts_dir/tmp/$configuration"
global_json_file="$repo_root/global.json"
# determine if global.json contains a "runtimes" entry
global_json_has_runtimes=false
-dotnetlocal_key=`grep -m 1 "runtimes" "$global_json_file"` || true
+dotnetlocal_key=$(awk "/runtimes/ {print; exit}" "$global_json_file") || true
if [[ -n "$dotnetlocal_key" ]]; then
global_json_has_runtimes=true
fi
@@ -444,3 +492,18 @@ Write-PipelineSetVariable -name "Artifacts.Toolset" -value "$toolset_dir"
Write-PipelineSetVariable -name "Artifacts.Log" -value "$log_dir"
Write-PipelineSetVariable -name "Temp" -value "$temp_dir"
Write-PipelineSetVariable -name "TMP" -value "$temp_dir"
+
+# Import custom tools configuration, if present in the repo.
+if [ -z "${disable_configure_toolset_import:-}" ]; then
+ configure_toolset_script="$eng_root/configure-toolset.sh"
+ if [[ -a "$configure_toolset_script" ]]; then
+ . "$configure_toolset_script"
+ fi
+fi
+
+# TODO: https://github.com/dotnet/arcade/issues/1468
+# Temporary workaround to avoid breaking change.
+# Remove once repos are updated.
+if [[ -n "${useInstalledDotNetCli:-}" ]]; then
+ use_installed_dotnet_cli="$useInstalledDotNetCli"
+fi
diff --git a/global.json b/global.json
index f3705522809..c46cbd36be2 100644
--- a/global.json
+++ b/global.json
@@ -1,6 +1,6 @@
{
"tools": {
- "dotnet": "3.1.100",
+ "dotnet": "5.0.102",
"runtimes": {
"dotnet/x64": [
"2.1.7"
@@ -12,6 +12,6 @@
},
"msbuild-sdks": {
"Microsoft.Build.CentralPackageVersions": "2.0.1",
- "Microsoft.DotNet.Arcade.Sdk": "1.0.0-beta.20580.3"
+ "Microsoft.DotNet.Arcade.Sdk": "5.0.0-beta.20621.10"
}
}
diff --git a/src/Build.UnitTests/ProjectEvaluationFinishedEventArgs_Tests.cs b/src/Build.UnitTests/ProjectEvaluationFinishedEventArgs_Tests.cs
index 57924761e84..93c1af36e9d 100644
--- a/src/Build.UnitTests/ProjectEvaluationFinishedEventArgs_Tests.cs
+++ b/src/Build.UnitTests/ProjectEvaluationFinishedEventArgs_Tests.cs
@@ -1,4 +1,4 @@
-// Copyright (c) Microsoft. All rights reserved.
+// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
@@ -26,7 +26,7 @@ public class ProjectEvaluationFinishedEventArgs_Tests
public void ProfilerResultRoundTrip(ProfilerResult profilerResult)
{
var writeTranslator = TranslationHelpers.GetWriteTranslator();
- ProfilerResult deserializedResult;
+ ProfilerResult deserializedResult = default;
writeTranslator.TranslateDotNet(ref profilerResult);
diff --git a/src/Build/Evaluation/Expander.cs b/src/Build/Evaluation/Expander.cs
index 33d2c467560..a655fd60a73 100644
--- a/src/Build/Evaluation/Expander.cs
+++ b/src/Build/Evaluation/Expander.cs
@@ -1,4 +1,4 @@
-// Copyright (c) Microsoft. All rights reserved.
+// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
@@ -4935,7 +4935,7 @@ private string GenerateStringOfMethodExecuted(string expression, object objectIn
}
else
{
- string propertyValue = "\"" + objectInstance as string + "\"";
+ string propertyValue = $"\"{objectInstance as string}\"";
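+                        // Note: interpolation applies 'as string' to objectInstance alone; in the old concatenation, '+' bound first, so the cast applied to the concatenated prefix.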
if ((_bindingFlags & BindingFlags.InvokeMethod) == BindingFlags.InvokeMethod)
{
diff --git a/src/Build/Microsoft.Build.csproj b/src/Build/Microsoft.Build.csproj
index 41fc39891bb..ec287656588 100644
--- a/src/Build/Microsoft.Build.csproj
+++ b/src/Build/Microsoft.Build.csproj
@@ -1,13 +1,11 @@
-
+
-
- netcoreapp2.1
- $(FullFrameworkTFM);netcoreapp2.1
+ net5.0
+ $(FullFrameworkTFM);net5.0
$(RuntimeOutputTargetFrameworks)
Microsoft.Build
Microsoft.Build
@@ -34,6 +32,7 @@
+
diff --git a/src/Deprecated/Conversion.UnitTests/XMakeConversionUnitTests.csproj b/src/Deprecated/Conversion.UnitTests/XMakeConversionUnitTests.csproj
index 762d88b132b..3f67674e3f2 100644
--- a/src/Deprecated/Conversion.UnitTests/XMakeConversionUnitTests.csproj
+++ b/src/Deprecated/Conversion.UnitTests/XMakeConversionUnitTests.csproj
@@ -96,5 +96,5 @@
-
-
\ No newline at end of file
+
+
diff --git a/src/Deprecated/Conversion/Microsoft.Build.Conversion.csproj b/src/Deprecated/Conversion/Microsoft.Build.Conversion.csproj
index af98c8f06b8..1fa50bc5c3d 100644
--- a/src/Deprecated/Conversion/Microsoft.Build.Conversion.csproj
+++ b/src/Deprecated/Conversion/Microsoft.Build.Conversion.csproj
@@ -20,7 +20,7 @@
-
+
$(AssemblyName).Strings.resources
Designer
@@ -31,12 +31,12 @@
$(AssemblyName).Strings.shared.resources
Designer
-
+
-
+
diff --git a/src/Deprecated/Engine.UnitTests/Microsoft.Build.Engine.UnitTests.csproj b/src/Deprecated/Engine.UnitTests/Microsoft.Build.Engine.UnitTests.csproj
index c615fe9bdbb..7360b075b1d 100644
--- a/src/Deprecated/Engine.UnitTests/Microsoft.Build.Engine.UnitTests.csproj
+++ b/src/Deprecated/Engine.UnitTests/Microsoft.Build.Engine.UnitTests.csproj
@@ -200,5 +200,5 @@
-
-
\ No newline at end of file
+
+
diff --git a/src/Deprecated/Engine/Microsoft.Build.Engine.csproj b/src/Deprecated/Engine/Microsoft.Build.Engine.csproj
index 1626f53236d..99f763a29b4 100644
--- a/src/Deprecated/Engine/Microsoft.Build.Engine.csproj
+++ b/src/Deprecated/Engine/Microsoft.Build.Engine.csproj
@@ -15,7 +15,7 @@
false
$(NoWarn);1570;1572;1573;1587
-
+
@@ -217,5 +217,5 @@
-
+
diff --git a/src/Directory.BeforeCommon.targets b/src/Directory.BeforeCommon.targets
index 3741fdc7d47..297b1c7c17a 100644
--- a/src/Directory.BeforeCommon.targets
+++ b/src/Directory.BeforeCommon.targets
@@ -107,10 +107,10 @@
$(DefineConstants);MONO
- $(DefineConstants);TEST_ISWINDOWS
+ $(DefineConstants);TEST_ISWINDOWS
-
+
true
$(DefineConstants);RUNTIME_TYPE_NETCORE
@@ -124,7 +124,7 @@
$(DefineConstants);WORKAROUND_COREFX_19110
-
+
$(DefineConstants);FEATURE_PIPEOPTIONS_CURRENTUSERONLY
$(DefineConstants);FEATURE_NODE_REUSE
$(DefineConstants);FEATURE_SPAN
diff --git a/src/Directory.Build.props b/src/Directory.Build.props
index 54b620c6013..e957eaa9db1 100644
--- a/src/Directory.Build.props
+++ b/src/Directory.Build.props
@@ -24,7 +24,7 @@
$(MSBuildThisFileDirectory)Directory.BeforeCommon.targets
AnyCPU;x64
-
+
$(FullFrameworkTFM);netstandard2.0
netstandard2.0
@@ -32,8 +32,8 @@
AnyCPU
- netcoreapp2.1
- $(FullFrameworkTFM);$(RuntimeOutputTargetFrameworks)
+ net5.0
+ $(FullFrameworkTFM);$(RuntimeOutputTargetFrameworks)
$(FullFrameworkTFM)
diff --git a/src/Directory.Build.targets b/src/Directory.Build.targets
index 7d4b6537997..e90e7f4fbf3 100644
--- a/src/Directory.Build.targets
+++ b/src/Directory.Build.targets
@@ -17,21 +17,22 @@
Exe
false
-
+
$(XunitOptions) -diagnostics
-
+
- $(XunitOptions) -notrait category=nonwindowstests
- $(XunitOptions) -notrait category=nonlinuxtests
- -notrait category=nonlinuxtests -notrait category=netcore-linux-failing
- $(XunitOptions) -notrait category=nonosxtests
- -notrait category=nonosxtests -notrait category=netcore-osx-failing
- $(XunitOptions) -notrait category=nonfreebsdtests
+ $(XunitOptions) -notrait category=nonwindowstests
+ $(XunitOptions) -notrait category=nonlinuxtests
+ -notrait category=nonlinuxtests -notrait category=netcore-linux-failing
+ $(XunitOptions) -notrait category=nonosxtests
+ -notrait category=nonosxtests -notrait category=netcore-osx-failing
+
+ $(XunitOptions) -notrait category=nonfreebsdtests
$(XunitOptions) -notrait category=non-mono-tests -notrait category=nonmonotests
- $(XunitOptions) -notrait category=mono-osx-failing
- $(XunitOptions) -notrait category=mono-windows-failing
+ $(XunitOptions) -notrait category=mono-osx-failing
+ $(XunitOptions) -notrait category=mono-windows-failing
$(XunitOptions) -notrait category=nonnetcoreapptests
$(XunitOptions) -notrait category=nonnetfxtests
@@ -39,7 +40,7 @@
$(XunitOptions) -notrait category=failing
$(XunitOptions)
-
+
$(DevDivPackagesDir)
@@ -73,17 +74,18 @@
false
-
-
+
+
$(AssemblyName)
$(MSBuildProjectName)
net
netstandard
netstandard
+ netstandard
$(RepoRoot)ref\$(GenAPIAssemblyName)\$(GenAPIShortFrameworkIdentifier)\$(GenAPIAssemblyName).cs
-
-
+
+
@@ -115,7 +117,7 @@
-
+
$([Microsoft.Build.Utilities.ToolLocationHelper]::GetPathToDotNetFrameworkSdkFile('tlbexp.exe'))
diff --git a/src/MSBuild.Bootstrap/MSBuild.Bootstrap.csproj b/src/MSBuild.Bootstrap/MSBuild.Bootstrap.csproj
index 3001ac1018d..dd786dfbed2 100644
--- a/src/MSBuild.Bootstrap/MSBuild.Bootstrap.csproj
+++ b/src/MSBuild.Bootstrap/MSBuild.Bootstrap.csproj
@@ -45,10 +45,10 @@
-
+
-
+
diff --git a/src/MSBuild/MSBuild.csproj b/src/MSBuild/MSBuild.csproj
index a6f0873dac4..a75bbeff256 100644
--- a/src/MSBuild/MSBuild.csproj
+++ b/src/MSBuild/MSBuild.csproj
@@ -7,6 +7,9 @@
Exe
$(RuntimeOutputTargetFrameworks)
$(RuntimeOutputPlatformTarget)
+
+ false
-
+
diff --git a/src/Utilities.UnitTests/TrackedDependencies/FileTrackerTests.cs b/src/Utilities.UnitTests/TrackedDependencies/FileTrackerTests.cs
index 9558f9098ea..79db28ce138 100644
--- a/src/Utilities.UnitTests/TrackedDependencies/FileTrackerTests.cs
+++ b/src/Utilities.UnitTests/TrackedDependencies/FileTrackerTests.cs
@@ -1,6 +1,8 @@
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
+#if FEATURE_FILE_TRACKER
+
using System;
using System.Collections.Generic;
using System.Diagnostics;
@@ -17,7 +19,6 @@
using BackEndNativeMethods = Microsoft.Build.BackEnd.NativeMethods;
// PLEASE NOTE: This is a UNICODE file as it contains UNICODE characters!
-#if FEATURE_FILE_TRACKER
namespace Microsoft.Build.UnitTests.FileTracking
{
diff --git a/src/Xunit.NetCore.Extensions/SkippedTestCase.cs b/src/Xunit.NetCore.Extensions/SkippedTestCase.cs
index d094c882519..518f4ffa77a 100644
--- a/src/Xunit.NetCore.Extensions/SkippedTestCase.cs
+++ b/src/Xunit.NetCore.Extensions/SkippedTestCase.cs
@@ -17,6 +17,11 @@ internal sealed class SkippedTestCase : LongLivedMarshalByRefObject, IXunitTestC
private readonly IXunitTestCase _testCase;
private readonly string _skippedReason;
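+        // Parameterless constructor: needed so the xUnit runner can deserialize the test case (rationale inferred, not stated in this change).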
+ public SkippedTestCase()
+ {
+
+ }
+
internal SkippedTestCase(IXunitTestCase testCase, string skippedReason)
{
_testCase = testCase;