diff --git a/FSharpBuild.Directory.Build.targets b/FSharpBuild.Directory.Build.targets
index 7548cef7acf..3132ba1c970 100644
--- a/FSharpBuild.Directory.Build.targets
+++ b/FSharpBuild.Directory.Build.targets
@@ -1,4 +1,5 @@
+
@@ -8,7 +9,7 @@
+ BeforeTargets="AssignTargetPaths;BeforeBuild;GenerateFSharpTextResources">
        <__TargetFilePath>@(NoneSubstituteText->'$(IntermediateOutputPath)%(Filename)%(Extension)')</__TargetFilePath>
@@ -20,16 +21,19 @@
        <_CopyToOutputDirectory Condition="'%(NoneSubstituteText.CopyToOutputDirectory)' != ''">%(NoneSubstituteText.CopyToOutputDirectory)</_CopyToOutputDirectory>
        <_CopyToOutputDirectory Condition="'%(NoneSubstituteText.CopyToOutputDirectory)' == ''">Never</_CopyToOutputDirectory>
+
+        <_IncludeInVsix>false</_IncludeInVsix>
+        <_IncludeInVsix Condition="'%(NoneSubstituteText.IncludeInVsix)' == 'true'">true</_IncludeInVsix>
-
-
+
+
@@ -61,4 +65,27 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
index ce181f02b84..4f40053c4cc 100644
--- a/eng/Version.Details.xml
+++ b/eng/Version.Details.xml
@@ -3,9 +3,9 @@
-
+
https://github.com/dotnet/arcade
- d2d025c1de37b1258f147851cb3e7373ad5ff09d
+ d4a1ce6278134f5dc25843e228d0498203031e61
diff --git a/eng/common/CheckSymbols.ps1 b/eng/common/CheckSymbols.ps1
index 5442eff3861..b8d84607b89 100644
--- a/eng/common/CheckSymbols.ps1
+++ b/eng/common/CheckSymbols.ps1
@@ -5,12 +5,11 @@ param(
)
Add-Type -AssemblyName System.IO.Compression.FileSystem
-. $PSScriptRoot\pipeline-logging-functions.ps1
function FirstMatchingSymbolDescriptionOrDefault {
param(
[string] $FullPath, # Full path to the module that has to be checked
- [string] $TargetServerParameter, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
+ [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
[string] $SymbolsPath
)
@@ -22,36 +21,36 @@ function FirstMatchingSymbolDescriptionOrDefault {
# checking and which type of file was uploaded.
# The file itself is returned
- $SymbolPath = $SymbolsPath + '\' + $FileName
+ $SymbolPath = $SymbolsPath + "\" + $FileName
# PDB file for the module
- $PdbPath = $SymbolPath.Replace($Extension, '.pdb')
+ $PdbPath = $SymbolPath.Replace($Extension, ".pdb")
# PDB file for R2R module (created by crossgen)
- $NGenPdb = $SymbolPath.Replace($Extension, '.ni.pdb')
+ $NGenPdb = $SymbolPath.Replace($Extension, ".ni.pdb")
# DBG file for a .so library
- $SODbg = $SymbolPath.Replace($Extension, '.so.dbg')
+ $SODbg = $SymbolPath.Replace($Extension, ".so.dbg")
# DWARF file for a .dylib
- $DylibDwarf = $SymbolPath.Replace($Extension, '.dylib.dwarf')
+ $DylibDwarf = $SymbolPath.Replace($Extension, ".dylib.dwarf")
- .\dotnet-symbol.exe --symbols --modules --windows-pdbs $TargetServerParameter $FullPath -o $SymbolsPath | Out-Null
+ .\dotnet-symbol.exe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath | Out-Null
if (Test-Path $PdbPath) {
- return 'PDB'
+ return "PDB"
}
elseif (Test-Path $NGenPdb) {
- return 'NGen PDB'
+ return "NGen PDB"
}
elseif (Test-Path $SODbg) {
- return 'DBG for SO'
+ return "DBG for SO"
}
elseif (Test-Path $DylibDwarf) {
- return 'Dwarf for Dylib'
+ return "Dwarf for Dylib"
}
elseif (Test-Path $SymbolPath) {
- return 'Module'
+ return "Module"
}
else {
return $null
@@ -69,7 +68,7 @@ function CountMissingSymbols {
}
# Extensions for which we'll look for symbols
- $RelevantExtensions = @('.dll', '.exe', '.so', '.dylib')
+ $RelevantExtensions = @(".dll", ".exe", ".so", ".dylib")
# How many files are missing symbol information
$MissingSymbols = 0
@@ -77,7 +76,7 @@ function CountMissingSymbols {
$PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
$PackageGuid = New-Guid
$ExtractPath = Join-Path -Path $ExtractPath -ChildPath $PackageGuid
- $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath 'Symbols'
+ $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath "Symbols"
[System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath)
@@ -87,31 +86,31 @@ function CountMissingSymbols {
Get-ChildItem -Recurse $ExtractPath |
Where-Object {$RelevantExtensions -contains $_.Extension} |
ForEach-Object {
- if ($_.FullName -Match '\\ref\\') {
+ if ($_.FullName -Match "\\ref\\") {
Write-Host "`t Ignoring reference assembly file" $_.FullName
return
}
- $SymbolsOnMSDL = FirstMatchingSymbolDescriptionOrDefault -FullPath $_.FullName -TargetServerParameter '--microsoft-symbol-server' -SymbolsPath $SymbolsPath
- $SymbolsOnSymWeb = FirstMatchingSymbolDescriptionOrDefault -FullPath $_.FullName -TargetServerParameter '--internal-server' -SymbolsPath $SymbolsPath
+ $SymbolsOnMSDL = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--microsoft-symbol-server" $SymbolsPath
+ $SymbolsOnSymWeb = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--internal-server" $SymbolsPath
Write-Host -NoNewLine "`t Checking file" $_.FullName "... "
if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) {
- Write-Host "Symbols found on MSDL (${$SymbolsOnMSDL}) and SymWeb (${$SymbolsOnSymWeb})"
+ Write-Host "Symbols found on MSDL (" $SymbolsOnMSDL ") and SymWeb (" $SymbolsOnSymWeb ")"
}
else {
$MissingSymbols++
if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) {
- Write-Host 'No symbols found on MSDL or SymWeb!'
+ Write-Host "No symbols found on MSDL or SymWeb!"
}
else {
if ($SymbolsOnMSDL -eq $null) {
- Write-Host 'No symbols found on MSDL!'
+ Write-Host "No symbols found on MSDL!"
}
else {
- Write-Host 'No symbols found on SymWeb!'
+ Write-Host "No symbols found on SymWeb!"
}
}
}
@@ -130,26 +129,26 @@ function CheckSymbolsAvailable {
Get-ChildItem "$InputPath\*.nupkg" |
ForEach-Object {
$FileName = $_.Name
-
+
# These packages from Arcade-Services include some native libraries that
# our current symbol uploader can't handle. Below is a workaround until
# we get issue: https://github.com/dotnet/arcade/issues/2457 sorted.
- if ($FileName -Match 'Microsoft\.DotNet\.Darc\.') {
+ if ($FileName -Match "Microsoft\.DotNet\.Darc\.") {
Write-Host "Ignoring Arcade-services file: $FileName"
Write-Host
return
}
- elseif ($FileName -Match 'Microsoft\.DotNet\.Maestro\.Tasks\.') {
+ elseif ($FileName -Match "Microsoft\.DotNet\.Maestro\.Tasks\.") {
Write-Host "Ignoring Arcade-services file: $FileName"
Write-Host
return
}
-
+
Write-Host "Validating $FileName "
$Status = CountMissingSymbols "$InputPath\$FileName"
if ($Status -ne 0) {
- Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Missing symbols for $Status modules in the package $FileName"
+ Write-Error "Missing symbols for $Status modules in the package $FileName"
}
Write-Host
diff --git a/eng/common/PublishToSymbolServers.proj b/eng/common/PublishToSymbolServers.proj
index 311e2bbe0f6..5d55e312b01 100644
--- a/eng/common/PublishToSymbolServers.proj
+++ b/eng/common/PublishToSymbolServers.proj
@@ -37,8 +37,6 @@
3650
true
- true
- true
false
@@ -58,7 +56,7 @@
DryRun="false"
ConvertPortablePdbsToWindowsPdbs="false"
PdbConversionTreatAsWarning=""
- Condition="$(PublishToSymbolServer) and $(PublishToMSDL)"/>
+ Condition="$(PublishToSymbolServer)"/>
diff --git a/eng/common/sdl/push-gdn.ps1 b/eng/common/sdl/push-gdn.ps1
--- a/eng/common/sdl/push-gdn.ps1
+++ b/eng/common/sdl/push-gdn.ps1
-  # git add . --> git commit --> git push
- Write-Host 'git add .'
- git add .
- if ($LASTEXITCODE -ne 0) {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git add failed with exit code $LASTEXITCODE."
- ExitWithExitCode $LASTEXITCODE
- }
- Write-Host "git -c user.email=`"dn-bot@microsoft.com`" -c user.name=`"Dotnet Bot`" commit -m `"$PushReason for $Repository/$BranchName`""
- git -c user.email="dn-bot@microsoft.com" -c user.name="Dotnet Bot" commit -m "$PushReason for $Repository/$BranchName"
- if ($LASTEXITCODE -ne 0) {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git commit failed with exit code $LASTEXITCODE."
- ExitWithExitCode $LASTEXITCODE
- }
- Write-Host 'git push'
- git push
- if ($LASTEXITCODE -ne 0) {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git push failed with exit code $LASTEXITCODE."
- ExitWithExitCode $LASTEXITCODE
- }
+# We create the temp directory where we'll store the sdl-config repository
+$sdlDir = Join-Path $env:TEMP "sdl"
+if (Test-Path $sdlDir) {
+ Remove-Item -Force -Recurse $sdlDir
+}
- # Return to the original directory
- Pop-Location
+Write-Host "git clone https://dnceng:`$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir"
+git clone https://dnceng:$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir
+if ($LASTEXITCODE -ne 0) {
+ Write-Error "Git clone failed with exit code $LASTEXITCODE."
+}
+# We copy the .gdn folder from our local run into the git repository so it can be committed
+$sdlRepositoryFolder = Join-Path (Join-Path (Join-Path $sdlDir $Repository) $BranchName) ".gdn"
+if (Get-Command Robocopy) {
+ Robocopy /S $GdnFolder $sdlRepositoryFolder
+} else {
+ rsync -r $GdnFolder $sdlRepositoryFolder
}
-catch {
- Write-Host $_.ScriptStackTrace
- Write-PipelineTelemetryError -Category 'Sdl' -Message $_
- ExitWithExitCode 1
-}
\ No newline at end of file
+# cd to the sdl-config directory so we can run git there
+Push-Location $sdlDir
+# git add . --> git commit --> git push
+Write-Host "git add ."
+git add .
+if ($LASTEXITCODE -ne 0) {
+ Write-Error "Git add failed with exit code $LASTEXITCODE."
+}
+Write-Host "git -c user.email=`"dn-bot@microsoft.com`" -c user.name=`"Dotnet Bot`" commit -m `"$PushReason for $Repository/$BranchName`""
+git -c user.email="dn-bot@microsoft.com" -c user.name="Dotnet Bot" commit -m "$PushReason for $Repository/$BranchName"
+if ($LASTEXITCODE -ne 0) {
+ Write-Error "Git commit failed with exit code $LASTEXITCODE."
+}
+Write-Host "git push"
+git push
+if ($LASTEXITCODE -ne 0) {
+ Write-Error "Git push failed with exit code $LASTEXITCODE."
+}
+
+# Return to the original directory
+Pop-Location
\ No newline at end of file
diff --git a/eng/common/sdl/run-sdl.ps1 b/eng/common/sdl/run-sdl.ps1
index 40a084f7969..9bc25314ae2 100644
--- a/eng/common/sdl/run-sdl.ps1
+++ b/eng/common/sdl/run-sdl.ps1
@@ -5,65 +5,55 @@ Param(
[string] $GdnFolder,
[string[]] $ToolsList,
[string] $UpdateBaseline,
- [string] $GuardianLoggerLevel='Standard',
+ [string] $GuardianLoggerLevel="Standard",
[string[]] $CrScanAdditionalRunConfigParams,
[string[]] $PoliCheckAdditionalRunConfigParams
)
-$ErrorActionPreference = 'Stop'
+$ErrorActionPreference = "Stop"
Set-StrictMode -Version 2.0
-$disableConfigureToolsetImport = $true
$LASTEXITCODE = 0
-try {
- . $PSScriptRoot\..\tools.ps1
+# We store config files in the r directory of .gdn
+Write-Host $ToolsList
+$gdnConfigPath = Join-Path $GdnFolder "r"
+$ValidPath = Test-Path $GuardianCliLocation
- # We store config files in the r directory of .gdn
- Write-Host $ToolsList
- $gdnConfigPath = Join-Path $GdnFolder 'r'
- $ValidPath = Test-Path $GuardianCliLocation
-
- if ($ValidPath -eq $False)
- {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
- ExitWithExitCode 1
- }
-
- $configParam = @('--config')
+if ($ValidPath -eq $False)
+{
+ Write-Host "Invalid Guardian CLI Location."
+ exit 1
+}
- foreach ($tool in $ToolsList) {
- $gdnConfigFile = Join-Path $gdnConfigPath "$tool-configure.gdnconfig"
- Write-Host $tool
- # We have to manually configure tools that run on source to look at the source directory only
- if ($tool -eq 'credscan') {
- Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" TargetDirectory < $TargetDirectory `" `" OutputType < pre `" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})"
- & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " TargetDirectory < $TargetDirectory " "OutputType < pre" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})
- if ($LASTEXITCODE -ne 0) {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE."
- ExitWithExitCode $LASTEXITCODE
- }
+$configParam = @("--config")
+
+foreach ($tool in $ToolsList) {
+ $gdnConfigFile = Join-Path $gdnConfigPath "$tool-configure.gdnconfig"
+ Write-Host $tool
+ # We have to manually configure tools that run on source to look at the source directory only
+ if ($tool -eq "credscan") {
+ Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" TargetDirectory < $TargetDirectory `" `" OutputType < pre `" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})"
+ & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " TargetDirectory < $TargetDirectory " "OutputType < pre" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})
+ if ($LASTEXITCODE -ne 0) {
+ Write-Host "Guardian configure for $tool failed with exit code $LASTEXITCODE."
+ exit $LASTEXITCODE
}
- if ($tool -eq 'policheck') {
- Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" Target < $TargetDirectory `" $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})"
- & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " Target < $TargetDirectory " $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})
- if ($LASTEXITCODE -ne 0) {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE."
- ExitWithExitCode $LASTEXITCODE
- }
+ }
+ if ($tool -eq "policheck") {
+ Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" Target < $TargetDirectory `" $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})"
+ & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " Target < $TargetDirectory " $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})
+ if ($LASTEXITCODE -ne 0) {
+ Write-Host "Guardian configure for $tool failed with exit code $LASTEXITCODE."
+ exit $LASTEXITCODE
}
-
- $configParam+=$gdnConfigFile
}
- Write-Host "$GuardianCliLocation run --working-directory $WorkingDirectory --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam"
- & $GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam
- if ($LASTEXITCODE -ne 0) {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian run for $ToolsList using $configParam failed with exit code $LASTEXITCODE."
- ExitWithExitCode $LASTEXITCODE
- }
+ $configParam+=$gdnConfigFile
+}
+
+Write-Host "$GuardianCliLocation run --working-directory $WorkingDirectory --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam"
+& $GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam
+if ($LASTEXITCODE -ne 0) {
+ Write-Host "Guardian run for $ToolsList using $configParam failed with exit code $LASTEXITCODE."
+ exit $LASTEXITCODE
}
-catch {
- Write-Host $_.ScriptStackTrace
- Write-PipelineTelemetryError -Category 'Sdl' -Message $_
- ExitWithExitCode 1
-}
\ No newline at end of file
diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml
index 2973bcaf3a4..52e2ff021d7 100644
--- a/eng/common/templates/job/execute-sdl.yml
+++ b/eng/common/templates/job/execute-sdl.yml
@@ -40,13 +40,13 @@ jobs:
itemPattern: "**"
downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
- powershell: eng/common/sdl/extract-artifact-packages.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
- -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
+ -InputPath $(Build.SourcesDirectory)\artifacts\BlobArtifacts
+ -ExtractPath $(Build.SourcesDirectory)\artifacts\BlobArtifacts
displayName: Extract Blob Artifacts
continueOnError: ${{ parameters.sdlContinueOnError }}
- powershell: eng/common/sdl/extract-artifact-packages.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
- -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
+ -InputPath $(Build.SourcesDirectory)\artifacts\PackageArtifacts
+ -ExtractPath $(Build.SourcesDirectory)\artifacts\PackageArtifacts
displayName: Extract Package Artifacts
continueOnError: ${{ parameters.sdlContinueOnError }}
- task: NuGetToolInstaller@1
diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
index ecebd0f03eb..ffda80a197b 100644
--- a/eng/common/templates/job/job.yml
+++ b/eng/common/templates/job/job.yml
@@ -1,33 +1,67 @@
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
parameters:
# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
cancelTimeoutInMinutes: ''
+
condition: ''
- container: ''
+
continueOnError: false
+
+ container: ''
+
dependsOn: ''
+
displayName: ''
- pool: ''
+
steps: []
+
+ pool: ''
+
strategy: ''
+
timeoutInMinutes: ''
+
variables: []
+
workspace: ''
-# Job base template specific parameters
- # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
- artifacts: ''
+ # Job base template specific parameters
+ # Optional: Enable installing Microbuild plugin
+ # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
+ # _TeamName - the name of your team
+ # _SignType - 'test' or 'real'
enableMicrobuild: false
+
+ # Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
+
+ # Optional: Enable publishing to the build asset registry
enablePublishBuildAssets: false
- enablePublishTestResults: false
+
+ # Optional: Prevent gather/push manifest from executing when using publishing pipelines
enablePublishUsingPipelines: false
+
+ # Optional: Include PublishTestResults task
+ enablePublishTestResults: false
+
+ # Optional: enable sending telemetry
+ enableTelemetry: false
+
+ # Optional: define the helix repo for telemetry (example: 'dotnet/arcade')
+ helixRepo: ''
+
+ # Optional: define the helix type for telemetry (example: 'build/product/')
+ helixType: ''
+
+ # Required: name of the job
name: ''
- preSteps: []
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
jobs:
- job: ${{ parameters.name }}
@@ -59,7 +93,7 @@ jobs:
timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
variables:
- - ${{ if ne(parameters.enableTelemetry, 'false') }}:
+ - ${{ if eq(parameters.enableTelemetry, 'true') }}:
- name: DOTNET_CLI_TELEMETRY_PROFILE
value: '$(Build.Repository.Uri)'
- ${{ each variable in parameters.variables }}:
@@ -91,12 +125,21 @@ jobs:
workspace: ${{ parameters.workspace }}
steps:
- - ${{ if ne(parameters.preSteps, '') }}:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
+ - ${{ if eq(parameters.enableTelemetry, 'true') }}:
+ # Telemetry tasks are built from https://github.com/dotnet/arcade-extensions
+ - task: sendStartTelemetry@0
+ displayName: 'Send Helix Start Telemetry'
+ inputs:
+ helixRepo: ${{ parameters.helixRepo }}
+ ${{ if ne(parameters.helixType, '') }}:
+ helixType: ${{ parameters.helixType }}
+ buildConfig: $(_BuildConfig)
+ runAsPublic: ${{ parameters.runAsPublic }}
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: always()
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: MicroBuildSigningPlugin@2
displayName: Install MicroBuild plugin
inputs:
@@ -108,16 +151,9 @@ jobs:
continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: NuGetAuthenticate@0
- - ${{ if or(eq(parameters.artifacts.download, 'true'), ne(parameters.artifacts.download, '')) }}:
- - task: DownloadPipelineArtifact@2
- inputs:
- buildType: current
- artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
- targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
- itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
-
- ${{ each step in parameters.steps }}:
- ${{ step }}
@@ -130,60 +166,20 @@ jobs:
env:
TeamName: $(_TeamName)
- - ${{ if ne(parameters.artifacts.publish, '') }}:
- - ${{ if or(eq(parameters.artifacts.publish.artifacts, 'true'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- - task: CopyFiles@2
- displayName: Gather binaries for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/bin'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
- - task: CopyFiles@2
- displayName: Gather packages for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/packages'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
- - task: PublishBuildArtifacts@1
- displayName: Publish pipeline artifacts
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
- continueOnError: true
- condition: always()
- - ${{ if or(eq(parameters.artifacts.publish.logs, 'true'), ne(parameters.artifacts.publish.logs, '')) }}:
- - publish: artifacts/log
- artifact: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
- displayName: Publish logs
- continueOnError: true
- condition: always()
- - ${{ if or(eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
- - ${{ if and(ne(parameters.enablePublishUsingPipelines, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: CopyFiles@2
- displayName: Gather Asset Manifests
- inputs:
- SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/AssetManifests'
- continueOnError: ${{ parameters.continueOnError }}
- condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
-
- - task: PublishBuildArtifacts@1
- displayName: Push Asset Manifests
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/AssetManifests'
- PublishLocation: Container
- ArtifactName: AssetManifests
- continueOnError: ${{ parameters.continueOnError }}
- condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
-
- - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
+ - ${{ if eq(parameters.enableTelemetry, 'true') }}:
+ # Telemetry tasks are built from https://github.com/dotnet/arcade-extensions
+ - task: sendEndTelemetry@0
+ displayName: 'Send Helix End Telemetry'
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: always()
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- task: PublishBuildArtifacts@1
displayName: Publish Logs
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
+ ArtifactName: $(Agent.Os)_$(Agent.JobName)
continueOnError: true
condition: always()
diff --git a/eng/common/templates/jobs/jobs.yml b/eng/common/templates/jobs/jobs.yml
index c08225a9a97..6a2f98c036f 100644
--- a/eng/common/templates/jobs/jobs.yml
+++ b/eng/common/templates/jobs/jobs.yml
@@ -1,10 +1,19 @@
parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+ # Optional: 'true' if failures in job.yml job should not fail the job
continueOnError: false
+ # Optional: Enable installing Microbuild plugin
+ # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
+ # _TeamName - the name of your team
+ # _SignType - 'test' or 'real'
+ enableMicrobuild: false
+
# Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
+ # Optional: Enable publishing to the build asset registry
+ enablePublishBuildAssets: false
+
# Optional: Enable publishing using release pipelines
enablePublishUsingPipelines: false
@@ -14,9 +23,19 @@ parameters:
# Optional: Include toolset dependencies in the generated graph files
includeToolset: false
+ # Optional: Include PublishTestResults task
+ enablePublishTestResults: false
+
+ # Optional: enable sending telemetry
+ # if enabled then the 'helixRepo' parameter should also be specified
+ enableTelemetry: false
+
# Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
jobs: []
+ # Optional: define the helix repo for telemetry (example: 'dotnet/arcade')
+ helixRepo: ''
+
# Optional: Override automatically derived dependsOn value for "publish build assets" job
publishBuildAssetsDependsOn: ''
@@ -43,30 +62,29 @@ jobs:
name: ${{ job.job }}
-- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
- - template: ../job/publish-build-assets.yml
- parameters:
- continueOnError: ${{ parameters.continueOnError }}
- dependsOn:
- - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.jobs }}:
- - ${{ job.job }}
- pool:
- vmImage: vs2017-win2016
- runAsPublic: ${{ parameters.runAsPublic }}
- publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
- enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
-
- - ${{ if eq(parameters.graphFileGeneration.enabled, true) }}:
- - template: ../job/generate-graph-files.yml
- parameters:
- continueOnError: ${{ parameters.continueOnError }}
- includeToolset: ${{ parameters.graphFileGeneration.includeToolset }}
- dependsOn:
- - Asset_Registry_Publish
- pool:
- vmImage: vs2017-win2016
+- ${{ if and(eq(parameters.enablePublishBuildAssets, true), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - template: ../job/publish-build-assets.yml
+ parameters:
+ continueOnError: ${{ parameters.continueOnError }}
+ dependsOn:
+ - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.jobs }}:
+ - ${{ job.job }}
+ pool:
+ vmImage: vs2017-win2016
+ runAsPublic: ${{ parameters.runAsPublic }}
+ publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
+ enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
+
+- ${{ if and(eq(parameters.graphFileGeneration.enabled, true), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - template: ../job/generate-graph-files.yml
+ parameters:
+ continueOnError: ${{ parameters.continueOnError }}
+ includeToolset: ${{ parameters.graphFileGeneration.includeToolset }}
+ dependsOn:
+ - Asset_Registry_Publish
+ pool:
+ vmImage: vs2017-win2016
diff --git a/eng/common/templates/post-build/channels/generic-internal-channel.yml b/eng/common/templates/post-build/channels/generic-internal-channel.yml
index 3a8755fbbb7..ad9375f5e5c 100644
--- a/eng/common/templates/post-build/channels/generic-internal-channel.yml
+++ b/eng/common/templates/post-build/channels/generic-internal-channel.yml
@@ -1,7 +1,4 @@
parameters:
- artifactsPublishingAdditionalParameters: ''
- dependsOn:
- - Validate
publishInstallersAndChecksums: false
symbolPublishingAdditionalParameters: ''
stageName: ''
@@ -13,14 +10,14 @@ parameters:
stages:
- stage: ${{ parameters.stageName }}
- dependsOn: ${{ parameters.dependsOn }}
+ dependsOn: validate
variables:
- template: ../common-variables.yml
displayName: ${{ parameters.channelName }} Publishing
jobs:
- template: ../setup-maestro-vars.yml
- - job: publish_symbols
+ - job:
displayName: Symbol Publishing
dependsOn: setupMaestroVars
condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.channelId }} ))
@@ -29,6 +26,10 @@ stages:
pool:
vmImage: 'windows-2019'
steps:
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
- task: DownloadBuildArtifacts@0
displayName: Download Blob Artifacts
inputs:
@@ -41,18 +42,6 @@ stages:
artifactName: 'PDBArtifacts'
continueOnError: true
- # This is necessary whenever we want to publish/restore to an AzDO private feed
- # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
- # otherwise it'll complain about accessing a private feed.
- - task: NuGetAuthenticate@0
- displayName: 'Authenticate to AzDO Feeds'
-
- - task: PowerShell@2
- displayName: Enable cross-org publishing
- inputs:
- filePath: eng\common\enable-cross-org-publishing.ps1
- arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
-
- task: PowerShell@2
displayName: Publish
inputs:
@@ -64,18 +53,14 @@ stages:
/p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
/p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
/p:Configuration=Release
- /p:PublishToMSDL=false
${{ parameters.symbolPublishingAdditionalParameters }}
- - template: ../../steps/publish-logs.yml
- parameters:
- StageLabel: '${{ parameters.stageName }}'
- JobLabel: 'SymbolPublishing'
-
- job: publish_assets
displayName: Publish Assets
dependsOn: setupMaestroVars
variables:
+ - group: DotNet-Blob-Feed
+ - group: AzureDevOps-Artifact-Feeds-Pats
- name: BARBuildId
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
- name: IsStableBuild
@@ -89,14 +74,13 @@ stages:
inputs:
buildType: current
artifactName: PackageArtifacts
- continueOnError: true
- task: DownloadBuildArtifacts@0
displayName: Download Blob Artifacts
inputs:
buildType: current
artifactName: BlobArtifacts
- continueOnError: true
+
- task: DownloadBuildArtifacts@0
displayName: Download Asset Manifests
inputs:
@@ -140,6 +124,7 @@ stages:
/p:ChecksumsAzureAccountKey=$(InternalChecksumsBlobFeedKey)
/p:InstallersTargetStaticFeed=$(InternalInstallersBlobFeedUrl)
/p:InstallersAzureAccountKey=$(InternalInstallersBlobFeedKey)
+ /p:PublishToAzureDevOpsNuGetFeeds=true
/p:AzureDevOpsStaticShippingFeed='${{ parameters.shippingFeed }}'
/p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
/p:AzureDevOpsStaticTransportFeed='${{ parameters.transportFeed }}'
@@ -149,11 +134,6 @@ stages:
/p:PublishToMSDL=false
${{ parameters.artifactsPublishingAdditionalParameters }}
- - template: ../../steps/publish-logs.yml
- parameters:
- StageLabel: '${{ parameters.stageName }}'
- JobLabel: 'AssetsPublishing'
-
- template: ../../steps/promote-build.yml
parameters:
ChannelId: ${{ parameters.channelId }}
diff --git a/eng/common/templates/post-build/channels/generic-public-channel.yml b/eng/common/templates/post-build/channels/generic-public-channel.yml
index 3f572f8b12f..c4bc1897d81 100644
--- a/eng/common/templates/post-build/channels/generic-public-channel.yml
+++ b/eng/common/templates/post-build/channels/generic-public-channel.yml
@@ -1,7 +1,5 @@
parameters:
artifactsPublishingAdditionalParameters: ''
- dependsOn:
- - Validate
publishInstallersAndChecksums: false
symbolPublishingAdditionalParameters: ''
stageName: ''
@@ -13,14 +11,14 @@ parameters:
stages:
- stage: ${{ parameters.stageName }}
- dependsOn: ${{ parameters.dependsOn }}
+ dependsOn: validate
variables:
- template: ../common-variables.yml
displayName: ${{ parameters.channelName }} Publishing
jobs:
- template: ../setup-maestro-vars.yml
- - job: publish_symbols
+ - job:
displayName: Symbol Publishing
dependsOn: setupMaestroVars
condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.channelId }} ))
@@ -66,11 +64,6 @@ stages:
/p:Configuration=Release
${{ parameters.symbolPublishingAdditionalParameters }}
- - template: ../../steps/publish-logs.yml
- parameters:
- StageLabel: '${{ parameters.stageName }}'
- JobLabel: 'SymbolPublishing'
-
- job: publish_assets
displayName: Publish Assets
dependsOn: setupMaestroVars
@@ -141,6 +134,7 @@ stages:
/p:InstallersAzureAccountKey=$(dotnetcli-storage-key)
/p:ChecksumsTargetStaticFeed=$(ChecksumsBlobFeedUrl)
/p:ChecksumsAzureAccountKey=$(dotnetclichecksums-storage-key)
+ /p:PublishToAzureDevOpsNuGetFeeds=true
/p:AzureDevOpsStaticShippingFeed='${{ parameters.shippingFeed }}'
/p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
/p:AzureDevOpsStaticTransportFeed='${{ parameters.transportFeed }}'
@@ -149,11 +143,6 @@ stages:
/p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
${{ parameters.artifactsPublishingAdditionalParameters }}
- - template: ../../steps/publish-logs.yml
- parameters:
- StageLabel: '${{ parameters.stageName }}'
- JobLabel: 'AssetsPublishing'
-
- template: ../../steps/promote-build.yml
parameters:
ChannelId: ${{ parameters.channelId }}
diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml
index 9505cf170f0..216d043e4e3 100644
--- a/eng/common/templates/post-build/common-variables.yml
+++ b/eng/common/templates/post-build/common-variables.yml
@@ -4,7 +4,7 @@ variables:
- group: DotNet-DotNetCli-Storage
- group: DotNet-MSRC-Storage
- group: Publish-Build-Assets
-
+
# .NET Core 3.1 Dev
- name: PublicDevRelease_31_Channel_Id
value: 128
@@ -49,10 +49,6 @@ variables:
- name: NetCore_31_Blazor_Features_Channel_Id
value: 531
- # .NET Core Experimental
- - name: NetCore_Experimental_Channel_Id
- value: 562
-
# Whether the build is internal or not
- name: IsInternalBuild
value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml
index 8a8d84f2026..b121d77e07d 100644
--- a/eng/common/templates/post-build/post-build.yml
+++ b/eng/common/templates/post-build/post-build.yml
@@ -17,14 +17,11 @@ parameters:
signingValidationAdditionalParameters: ''
# Which stages should finish execution before post-build stages start
- validateDependsOn:
- - build
- publishDependsOn:
- - Validate
+ dependsOn: [build]
stages:
-- stage: Validate
- dependsOn: ${{ parameters.validateDependsOn }}
+- stage: validate
+ dependsOn: ${{ parameters.dependsOn }}
displayName: Validate
jobs:
- ${{ if eq(parameters.enableNugetValidation, 'true') }}:
@@ -49,28 +46,20 @@ stages:
- ${{ if eq(parameters.enableSigningValidation, 'true') }}:
- job:
displayName: Signing Validation
- variables:
- - template: common-variables.yml
pool:
vmImage: 'windows-2019'
steps:
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: current
- artifactName: PackageArtifacts
-
# This is necessary whenever we want to publish/restore to an AzDO private feed
# Since sdk-task.ps1 tries to restore packages we need to do this authentication here
# otherwise it'll complain about accessing a private feed.
- task: NuGetAuthenticate@0
displayName: 'Authenticate to AzDO Feeds'
- - task: PowerShell@2
- displayName: Enable cross-org publishing
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
inputs:
- filePath: eng\common\enable-cross-org-publishing.ps1
- arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+ buildType: current
+ artifactName: PackageArtifacts
- task: PowerShell@2
displayName: Validate
@@ -79,13 +68,9 @@ stages:
arguments: -task SigningValidation -restore -msbuildEngine dotnet
/p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
/p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
+ /p:Configuration=Release
${{ parameters.signingValidationAdditionalParameters }}
- - template: ../steps/publish-logs.yml
- parameters:
- StageLabel: 'Validation'
- JobLabel: 'Signing'
-
- ${{ if eq(parameters.enableSourceLinkValidation, 'true') }}:
- job:
displayName: SourceLink Validation
@@ -121,7 +106,6 @@ stages:
- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
parameters:
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- dependsOn: ${{ parameters.publishDependsOn }}
publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
stageName: 'NetCore_Dev5_Publish'
@@ -134,7 +118,18 @@ stages:
- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
parameters:
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NetCore_Dev31_Publish'
+ channelName: '.NET Core 3.1 Dev'
+ channelId: 128
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
stageName: 'Net_Eng_Latest_Publish'
@@ -147,7 +142,6 @@ stages:
- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
parameters:
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- dependsOn: ${{ parameters.publishDependsOn }}
publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
stageName: 'Net_Eng_Validation_Publish'
@@ -160,7 +154,90 @@ stages:
- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
parameters:
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NetCore_3_Tools_Validation_Publish'
+ channelName: '.NET 3 Tools - Validation'
+ channelId: 390
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NetCore_3_Tools_Publish'
+ channelName: '.NET 3 Tools'
+ channelId: 344
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NetCore_Release30_Publish'
+ channelName: '.NET Core 3.0 Release'
+ channelId: 19
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NetCore_Release31_Publish'
+ channelName: '.NET Core 3.1 Release'
+ channelId: 129
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NetCore_Blazor31_Features_Publish'
+ channelName: '.NET Core 3.1 Blazor Features'
+ channelId: 531
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-blazor/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-blazor/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-blazor-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NetCore_30_Internal_Servicing_Publishing'
+ channelName: '.NET Core 3.0 Internal Servicing'
+ channelId: 184
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NetCore_31_Internal_Servicing_Publishing'
+ channelName: '.NET Core 3.1 Internal Servicing'
+ channelId: 550
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
stageName: 'General_Testing_Publish'
@@ -173,7 +250,6 @@ stages:
- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
parameters:
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- dependsOn: ${{ parameters.publishDependsOn }}
publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
stageName: 'NETCore_Tooling_Dev_Publishing'
@@ -186,7 +262,6 @@ stages:
- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
parameters:
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- dependsOn: ${{ parameters.publishDependsOn }}
publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
stageName: 'NETCore_Tooling_Release_Publishing'
@@ -196,28 +271,74 @@ stages:
shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
+- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_SDK_301xx_Publishing'
+ channelName: '.NET Core SDK 3.0.1xx'
+ channelId: 556
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3-symbols/nuget/v3/index.json'
+
- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
parameters:
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- dependsOn: ${{ parameters.publishDependsOn }}
publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NET_Internal_Tooling_Publishing'
- channelName: '.NET Internal Tooling'
- channelId: 551
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal-symbols/nuget/v3/index.json'
+ stageName: 'NETCore_SDK_301xx_Internal_Publishing'
+ channelName: '.NET Core SDK 3.0.1xx Internal'
+ channelId: 555
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal-symbols/nuget/v3/index.json'
- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
parameters:
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- dependsOn: ${{ parameters.publishDependsOn }}
publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
- stageName: 'NETCore_Experimental_Publishing'
- channelName: '.NET Core Experimental'
- channelId: 562
- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental/nuget/v3/index.json'
- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental/nuget/v3/index.json'
- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental-symbols/nuget/v3/index.json'
+ stageName: 'NETCore_SDK_311xx_Publishing'
+ channelName: '.NET Core SDK 3.1.1xx'
+ channelId: 560
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_SDK_311xx_Internal_Publishing'
+ channelName: '.NET Core SDK 3.1.1xx Internal'
+ channelId: 559
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_SDK_312xx_Publishing'
+ channelName: '.NET Core SDK 3.1.2xx'
+ channelId: 558
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'
+
+- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_SDK_312xx_Internal_Publishing'
+ channelName: '.NET Core SDK 3.1.2xx Internal'
+ channelId: 557
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json'
\ No newline at end of file
diff --git a/eng/common/templates/post-build/setup-maestro-vars.yml b/eng/common/templates/post-build/setup-maestro-vars.yml
index 716b53f7405..56242b068e1 100644
--- a/eng/common/templates/post-build/setup-maestro-vars.yml
+++ b/eng/common/templates/post-build/setup-maestro-vars.yml
@@ -4,8 +4,6 @@ jobs:
pool:
vmImage: 'windows-2019'
steps:
- - checkout: none
-
- task: DownloadBuildArtifacts@0
displayName: Download Release Configs
inputs:
@@ -16,25 +14,5 @@ jobs:
name: setReleaseVars
displayName: Set Release Configs Vars
inputs:
- targetType: inline
- script: |
- try {
- $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
-
- $BarId = $Content | Select -Index 0
-
- $Channels = ""
- $Content | Select -Index 1 | ForEach-Object { $Channels += "$_ ," }
-
- $IsStableBuild = $Content | Select -Index 2
-
- Write-Host "##vso[task.setvariable variable=BARBuildId;isOutput=true]$BarId"
- Write-Host "##vso[task.setvariable variable=InitialChannels;isOutput=true]$Channels"
- Write-Host "##vso[task.setvariable variable=IsStableBuild;isOutput=true]$IsStableBuild"
- }
- catch {
- Write-Host $_
- Write-Host $_.Exception
- Write-Host $_.ScriptStackTrace
- exit 1
- }
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/setup-maestro-vars.ps1
+ arguments: -ReleaseConfigsPath '$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt'
diff --git a/eng/common/templates/steps/publish-logs.yml b/eng/common/templates/steps/publish-logs.yml
deleted file mode 100644
index f91751fe78e..00000000000
--- a/eng/common/templates/steps/publish-logs.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-parameters:
- StageLabel: ''
- JobLabel: ''
-
-steps:
-- task: Powershell@2
- displayName: Prepare Binlogs to Upload
- inputs:
- targetType: inline
- script: |
- New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- continueOnError: true
- condition: always()
-
-- task: PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
- PublishLocation: Container
- ArtifactName: PostBuildLogs
- continueOnError: true
- condition: always()
diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml
index 30becf01ea5..05df886f55f 100644
--- a/eng/common/templates/steps/send-to-helix.yml
+++ b/eng/common/templates/steps/send-to-helix.yml
@@ -23,7 +23,6 @@ parameters:
EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control
WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
- HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting int)
Creator: '' # optional -- if the build is external, use this to specify who is sending the job
DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
@@ -56,7 +55,6 @@ steps:
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
Creator: ${{ parameters.Creator }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
@@ -87,7 +85,6 @@ steps:
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
Creator: ${{ parameters.Creator }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
index d3a432878e2..92a053bd16b 100644
--- a/eng/common/tools.ps1
+++ b/eng/common/tools.ps1
@@ -5,7 +5,7 @@
[bool]$ci = if (Test-Path variable:ci) { $ci } else { $false }
# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names.
-[string]$configuration = if (Test-Path variable:configuration) { $configuration } else { 'Debug' }
+[string]$configuration = if (Test-Path variable:configuration) { $configuration } else { "Debug" }
# Set to true to output binary log from msbuild. Note that emitting binary log slows down the build.
# Binary log must be enabled on CI.
@@ -24,7 +24,7 @@
[bool]$restore = if (Test-Path variable:restore) { $restore } else { $true }
# Adjusts msbuild verbosity level.
-[string]$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { 'minimal' }
+[string]$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { "minimal" }
# Set to true to reuse msbuild nodes. Recommended to not reuse on CI.
[bool]$nodeReuse = if (Test-Path variable:nodeReuse) { $nodeReuse } else { !$ci }
@@ -41,23 +41,21 @@
# Enable repos to use a particular version of the on-line dotnet-install scripts.
# default URL: https://dot.net/v1/dotnet-install.ps1
-[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' }
+[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { "v1" }
# True to use global NuGet cache instead of restoring packages to repository-local directory.
[bool]$useGlobalNuGetCache = if (Test-Path variable:useGlobalNuGetCache) { $useGlobalNuGetCache } else { !$ci }
# An array of names of processes to stop on script exit if prepareMachine is true.
-$processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @('msbuild', 'dotnet', 'vbcscompiler') }
-
-$disableConfigureToolsetImport = if (Test-Path variable:disableConfigureToolsetImport) { $disableConfigureToolsetImport } else { $null }
+$processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @("msbuild", "dotnet", "vbcscompiler") }
set-strictmode -version 2.0
-$ErrorActionPreference = 'Stop'
+$ErrorActionPreference = "Stop"
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
function Create-Directory([string[]] $path) {
if (!(Test-Path $path)) {
- New-Item -path $path -force -itemType 'Directory' | Out-Null
+ New-Item -path $path -force -itemType "Directory" | Out-Null
}
}
@@ -98,10 +96,7 @@ function Exec-Process([string]$command, [string]$commandArgs) {
}
}
-# createSdkLocationFile parameter enables a file being generated under the toolset directory
-# which writes the sdk's location into. This is only necessary for cmd --> powershell invocations
-# as dot sourcing isn't possible.
-function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
+function InitializeDotNetCli([bool]$install) {
if (Test-Path variable:global:_DotNetInstallDir) {
return $global:_DotNetInstallDir
}
@@ -124,7 +119,7 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
# Find the first path on %PATH% that contains the dotnet.exe
if ($useInstalledDotNetCli -and (-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -eq $null)) {
- $dotnetCmd = Get-Command 'dotnet.exe' -ErrorAction SilentlyContinue
+ $dotnetCmd = Get-Command "dotnet.exe" -ErrorAction SilentlyContinue
if ($dotnetCmd -ne $null) {
$env:DOTNET_INSTALL_DIR = Split-Path $dotnetCmd.Path -Parent
}
@@ -137,13 +132,13 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
if ((-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -ne $null) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) {
$dotnetRoot = $env:DOTNET_INSTALL_DIR
} else {
- $dotnetRoot = Join-Path $RepoRoot '.dotnet'
+ $dotnetRoot = Join-Path $RepoRoot ".dotnet"
if (-not (Test-Path(Join-Path $dotnetRoot "sdk\$dotnetSdkVersion"))) {
if ($install) {
InstallDotNetSdk $dotnetRoot $dotnetSdkVersion
} else {
- Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to find dotnet with SDK version '$dotnetSdkVersion'"
+ Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Unable to find dotnet with SDK version '$dotnetSdkVersion'"
ExitWithExitCode 1
}
}
@@ -151,24 +146,6 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
$env:DOTNET_INSTALL_DIR = $dotnetRoot
}
- # Creates a temporary file under the toolset dir.
- # The following code block is protecting against concurrent access so that this function can
- # be called in parallel.
- if ($createSdkLocationFile) {
- do {
- $sdkCacheFileTemp = Join-Path $ToolsetDir $([System.IO.Path]::GetRandomFileName())
- }
- until (!(Test-Path $sdkCacheFileTemp))
- Set-Content -Path $sdkCacheFileTemp -Value $dotnetRoot
-
- try {
- Rename-Item -Force -Path $sdkCacheFileTemp 'sdk.txt'
- } catch {
- # Somebody beat us
- Remove-Item -Path $sdkCacheFileTemp
- }
- }
-
# Add dotnet to PATH. This prevents any bare invocation of dotnet in custom
# build steps from using anything other than what we've downloaded.
# It also ensures that VS msbuild will use the downloaded sdk targets.
@@ -177,6 +154,15 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
# Make Sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build
Write-PipelinePrependPath -Path $dotnetRoot
+ # Work around issues with Azure Artifacts credential provider
+ # https://github.com/dotnet/arcade/issues/3932
+ if ($ci) {
+ $env:NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS = 20
+ $env:NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS = 20
+ Write-PipelineSetVariable -Name 'NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS' -Value '20'
+ Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20'
+ }
+
Write-PipelineSetVariable -Name 'DOTNET_MULTILEVEL_LOOKUP' -Value '0'
Write-PipelineSetVariable -Name 'DOTNET_SKIP_FIRST_TIME_EXPERIENCE' -Value '1'
@@ -184,7 +170,7 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
}
function GetDotNetInstallScript([string] $dotnetRoot) {
- $installScript = Join-Path $dotnetRoot 'dotnet-install.ps1'
+ $installScript = Join-Path $dotnetRoot "dotnet-install.ps1"
if (!(Test-Path $installScript)) {
Create-Directory $dotnetRoot
$ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
@@ -194,17 +180,17 @@ function GetDotNetInstallScript([string] $dotnetRoot) {
return $installScript
}
-function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = '') {
+function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = "") {
InstallDotNet $dotnetRoot $version $architecture
}
function InstallDotNet([string] $dotnetRoot,
[string] $version,
- [string] $architecture = '',
- [string] $runtime = '',
+ [string] $architecture = "",
+ [string] $runtime = "",
[bool] $skipNonVersionedFiles = $false,
- [string] $runtimeSourceFeed = '',
- [string] $runtimeSourceFeedKey = '') {
+ [string] $runtimeSourceFeed = "",
+ [string] $runtimeSourceFeedKey = "") {
$installScript = GetDotNetInstallScript $dotnetRoot
$installParameters = @{
@@ -220,7 +206,7 @@ function InstallDotNet([string] $dotnetRoot,
& $installScript @installParameters
}
catch {
- Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install dotnet runtime '$runtime' from public location."
+ Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Failed to install dotnet runtime '$runtime' from public location."
# Only the runtime can be installed from a custom [private] location.
if ($runtime -and ($runtimeSourceFeed -or $runtimeSourceFeedKey)) {
@@ -236,7 +222,7 @@ function InstallDotNet([string] $dotnetRoot,
& $installScript @installParameters
}
catch {
- Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install dotnet runtime '$runtime' from custom location '$runtimeSourceFeed'."
+ Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Failed to install dotnet runtime '$runtime' from custom location '$runtimeSourceFeed'."
ExitWithExitCode 1
}
} else {
@@ -262,16 +248,16 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
}
if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
- $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { '15.9' }
+ $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { "15.9" }
$vsMinVersion = [Version]::new($vsMinVersionStr)
# Try msbuild command available in the environment.
if ($env:VSINSTALLDIR -ne $null) {
- $msbuildCmd = Get-Command 'msbuild.exe' -ErrorAction SilentlyContinue
+ $msbuildCmd = Get-Command "msbuild.exe" -ErrorAction SilentlyContinue
if ($msbuildCmd -ne $null) {
# Workaround for https://github.com/dotnet/roslyn/issues/35793
# Due to this issue $msbuildCmd.Version returns 0.0.0.0 for msbuild.exe 16.2+
- $msbuildVersion = [Version]::new((Get-Item $msbuildCmd.Path).VersionInfo.ProductVersion.Split([char[]]@('-', '+'))[0])
+ $msbuildVersion = [Version]::new((Get-Item $msbuildCmd.Path).VersionInfo.ProductVersion.Split(@('-', '+'))[0])
if ($msbuildVersion -ge $vsMinVersion) {
return $global:_MSBuildExe = $msbuildCmd.Path
@@ -291,20 +277,17 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
InitializeVisualStudioEnvironmentVariables $vsInstallDir $vsMajorVersion
} else {
- if (Get-Member -InputObject $GlobalJson.tools -Name 'xcopy-msbuild') {
+ if (Get-Member -InputObject $GlobalJson.tools -Name "xcopy-msbuild") {
$xcopyMSBuildVersion = $GlobalJson.tools.'xcopy-msbuild'
$vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0]
} else {
$vsMajorVersion = $vsMinVersion.Major
$xcopyMSBuildVersion = "$vsMajorVersion.$($vsMinVersion.Minor).0-alpha"
}
-
- $vsInstallDir = $null
- if ($xcopyMSBuildVersion.Trim() -ine "none") {
- $vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install
- }
+
+ $vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install
if ($vsInstallDir -eq $null) {
- throw 'Unable to find Visual Studio that has required version and components installed'
+ throw "Unable to find Visual Studio that has required version and components installed"
}
}
@@ -328,7 +311,7 @@ function InstallXCopyMSBuild([string]$packageVersion) {
}
function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
- $packageName = 'RoslynTools.MSBuild'
+ $packageName = "RoslynTools.MSBuild"
$packageDir = Join-Path $ToolsDir "msbuild\$packageVersion"
$packagePath = Join-Path $packageDir "$packageName.$packageVersion.nupkg"
@@ -344,7 +327,7 @@ function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
Unzip $packagePath $packageDir
}
- return Join-Path $packageDir 'tools'
+ return Join-Path $packageDir "tools"
}
#
@@ -361,32 +344,32 @@ function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
# or $null if no instance meeting the requirements is found on the machine.
#
function LocateVisualStudio([object]$vsRequirements = $null){
- if (Get-Member -InputObject $GlobalJson.tools -Name 'vswhere') {
+ if (Get-Member -InputObject $GlobalJson.tools -Name "vswhere") {
$vswhereVersion = $GlobalJson.tools.vswhere
} else {
- $vswhereVersion = '2.5.2'
+ $vswhereVersion = "2.5.2"
}
$vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion"
- $vsWhereExe = Join-Path $vsWhereDir 'vswhere.exe'
+ $vsWhereExe = Join-Path $vsWhereDir "vswhere.exe"
if (!(Test-Path $vsWhereExe)) {
Create-Directory $vsWhereDir
- Write-Host 'Downloading vswhere'
+ Write-Host "Downloading vswhere"
Invoke-WebRequest "https://github.com/Microsoft/vswhere/releases/download/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
}
if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
- $args = @('-latest', '-prerelease', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*')
+ $args = @("-latest", "-prerelease", "-format", "json", "-requires", "Microsoft.Component.MSBuild", "-products", "*")
- if (Get-Member -InputObject $vsRequirements -Name 'version') {
- $args += '-version'
+ if (Get-Member -InputObject $vsRequirements -Name "version") {
+ $args += "-version"
$args += $vsRequirements.version
}
- if (Get-Member -InputObject $vsRequirements -Name 'components') {
+ if (Get-Member -InputObject $vsRequirements -Name "components") {
foreach ($component in $vsRequirements.components) {
- $args += '-requires'
+ $args += "-requires"
$args += $component
}
}
@@ -412,27 +395,27 @@ function InitializeBuildTool() {
# Initialize dotnet cli if listed in 'tools'
$dotnetRoot = $null
- if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') {
+ if (Get-Member -InputObject $GlobalJson.tools -Name "dotnet") {
$dotnetRoot = InitializeDotNetCli -install:$restore
}
- if ($msbuildEngine -eq 'dotnet') {
+ if ($msbuildEngine -eq "dotnet") {
if (!$dotnetRoot) {
- Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "/global.json must specify 'tools.dotnet'."
+ Write-PipelineTelemetryError -Category "InitializeToolset" -Message "/global.json must specify 'tools.dotnet'."
ExitWithExitCode 1
}
- $buildTool = @{ Path = Join-Path $dotnetRoot 'dotnet.exe'; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'netcoreapp2.1' }
+ $buildTool = @{ Path = Join-Path $dotnetRoot "dotnet.exe"; Command = "msbuild"; Tool = "dotnet"; Framework = "netcoreapp2.1" }
} elseif ($msbuildEngine -eq "vs") {
try {
$msbuildPath = InitializeVisualStudioMSBuild -install:$restore
} catch {
- Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
+ Write-PipelineTelemetryError -Category "InitializeToolset" -Message $_
ExitWithExitCode 1
}
$buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472" }
} else {
- Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'."
+ Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'."
ExitWithExitCode 1
}
@@ -441,15 +424,15 @@ function InitializeBuildTool() {
function GetDefaultMSBuildEngine() {
# Presence of tools.vs indicates the repo needs to build using VS msbuild on Windows.
- if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {
- return 'vs'
+ if (Get-Member -InputObject $GlobalJson.tools -Name "vs") {
+ return "vs"
}
- if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') {
- return 'dotnet'
+ if (Get-Member -InputObject $GlobalJson.tools -Name "dotnet") {
+ return "dotnet"
}
- Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'."
+ Write-PipelineTelemetryError -Category "InitializeToolset" -Message "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'."
ExitWithExitCode 1
}
@@ -458,9 +441,9 @@ function GetNuGetPackageCachePath() {
# Use local cache on CI to ensure deterministic build,
# use global cache in dev builds to avoid cost of downloading packages.
if ($useGlobalNuGetCache) {
- $env:NUGET_PACKAGES = Join-Path $env:UserProfile '.nuget\packages'
+ $env:NUGET_PACKAGES = Join-Path $env:UserProfile ".nuget\packages"
} else {
- $env:NUGET_PACKAGES = Join-Path $RepoRoot '.packages'
+ $env:NUGET_PACKAGES = Join-Path $RepoRoot ".packages"
}
}
@@ -473,7 +456,7 @@ function GetSdkTaskProject([string]$taskName) {
}
function InitializeNativeTools() {
- if (-Not (Test-Path variable:DisableNativeToolsetInstalls) -And (Get-Member -InputObject $GlobalJson -Name "native-tools")) {
+ if (Get-Member -InputObject $GlobalJson -Name "native-tools") {
$nativeArgs= @{}
if ($ci) {
$nativeArgs = @{
@@ -502,14 +485,14 @@ function InitializeToolset() {
}
if (-not $restore) {
- Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Toolset version $toolsetVersion has not been restored."
+ Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Toolset version $toolsetVersion has not been restored."
ExitWithExitCode 1
}
$buildTool = InitializeBuildTool
- $proj = Join-Path $ToolsetDir 'restore.proj'
- $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'ToolsetRestore.binlog') } else { '' }
+ $proj = Join-Path $ToolsetDir "restore.proj"
+ $bl = if ($binaryLog) { "/bl:" + (Join-Path $LogDir "ToolsetRestore.binlog") } else { "" }
'' | Set-Content $proj
@@ -531,7 +514,7 @@ function ExitWithExitCode([int] $exitCode) {
}
function Stop-Processes() {
- Write-Host 'Killing running build processes...'
+ Write-Host "Killing running build processes..."
foreach ($processName in $processesToStopOnExit) {
Get-Process -Name $processName -ErrorAction SilentlyContinue | Stop-Process
}
@@ -548,18 +531,13 @@ function MSBuild() {
# Work around issues with Azure Artifacts credential provider
# https://github.com/dotnet/arcade/issues/3932
- if ($ci -and $buildTool.Tool -eq 'dotnet') {
+ if ($ci -and $buildTool.Tool -eq "dotnet") {
dotnet nuget locals http-cache -c
-
- $env:NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS = 20
- $env:NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS = 20
- Write-PipelineSetVariable -Name 'NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS' -Value '20'
- Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20'
}
$toolsetBuildProject = InitializeToolset
$path = Split-Path -parent $toolsetBuildProject
- $path = Join-Path $path (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')
+ $path = Join-Path $path (Join-Path $buildTool.Framework "Microsoft.DotNet.Arcade.Sdk.dll")
$args += "/logger:$path"
}
@@ -574,12 +552,12 @@ function MSBuild() {
function MSBuild-Core() {
if ($ci) {
if (!$binaryLog) {
- Write-PipelineTelemetryError -Category 'Build' -Message 'Binary log must be enabled in CI build.'
+ Write-PipelineTaskError -Message "Binary log must be enabled in CI build."
ExitWithExitCode 1
}
if ($nodeReuse) {
- Write-PipelineTelemetryError -Category 'Build' -Message 'Node reuse must be disabled in CI build.'
+ Write-PipelineTaskError -Message "Node reuse must be disabled in CI build."
ExitWithExitCode 1
}
}
@@ -589,10 +567,10 @@ function MSBuild-Core() {
$cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci"
if ($warnAsError) {
- $cmdArgs += ' /warnaserror /p:TreatWarningsAsErrors=true'
+ $cmdArgs += " /warnaserror /p:TreatWarningsAsErrors=true"
}
else {
- $cmdArgs += ' /p:TreatWarningsAsErrors=false'
+ $cmdArgs += " /p:TreatWarningsAsErrors=false"
}
foreach ($arg in $args) {
@@ -604,7 +582,7 @@ function MSBuild-Core() {
$exitCode = Exec-Process $buildTool.Path $cmdArgs
if ($exitCode -ne 0) {
- Write-PipelineTelemetryError Category 'Build' -Message 'Build failed.'
+ Write-PipelineTaskError -Message "Build failed."
$buildLog = GetMSBuildBinaryLogCommandLineArgument $args
if ($buildLog -ne $null) {
@@ -619,12 +597,12 @@ function GetMSBuildBinaryLogCommandLineArgument($arguments) {
foreach ($argument in $arguments) {
if ($argument -ne $null) {
$arg = $argument.Trim()
- if ($arg.StartsWith('/bl:', "OrdinalIgnoreCase")) {
- return $arg.Substring('/bl:'.Length)
+ if ($arg.StartsWith("/bl:", "OrdinalIgnoreCase")) {
+ return $arg.Substring("/bl:".Length)
}
- if ($arg.StartsWith('/binaryLogger:', 'OrdinalIgnoreCase')) {
- return $arg.Substring('/binaryLogger:'.Length)
+ if ($arg.StartsWith("/binaryLogger:", "OrdinalIgnoreCase")) {
+ return $arg.Substring("/binaryLogger:".Length)
}
}
}
@@ -634,14 +612,14 @@ function GetMSBuildBinaryLogCommandLineArgument($arguments) {
. $PSScriptRoot\pipeline-logging-functions.ps1
-$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..')
-$EngRoot = Resolve-Path (Join-Path $PSScriptRoot '..')
-$ArtifactsDir = Join-Path $RepoRoot 'artifacts'
-$ToolsetDir = Join-Path $ArtifactsDir 'toolset'
-$ToolsDir = Join-Path $RepoRoot '.tools'
-$LogDir = Join-Path (Join-Path $ArtifactsDir 'log') $configuration
-$TempDir = Join-Path (Join-Path $ArtifactsDir 'tmp') $configuration
-$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot 'global.json') | ConvertFrom-Json
+$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot "..\..")
+$EngRoot = Resolve-Path (Join-Path $PSScriptRoot "..")
+$ArtifactsDir = Join-Path $RepoRoot "artifacts"
+$ToolsetDir = Join-Path $ArtifactsDir "toolset"
+$ToolsDir = Join-Path $RepoRoot ".tools"
+$LogDir = Join-Path (Join-Path $ArtifactsDir "log") $configuration
+$TempDir = Join-Path (Join-Path $ArtifactsDir "tmp") $configuration
+$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot "global.json") | ConvertFrom-Json
# true if global.json contains a "runtimes" section
$globalJsonHasRuntimes = if ($GlobalJson.tools.PSObject.Properties.Name -Match 'runtimes') { $true } else { $false }
@@ -654,18 +632,3 @@ Write-PipelineSetVariable -Name 'Artifacts.Toolset' -Value $ToolsetDir
Write-PipelineSetVariable -Name 'Artifacts.Log' -Value $LogDir
Write-PipelineSetVariable -Name 'TEMP' -Value $TempDir
Write-PipelineSetVariable -Name 'TMP' -Value $TempDir
-
-# Import custom tools configuration, if present in the repo.
-# Note: Import in global scope so that the script set top-level variables without qualification.
-if (!$disableConfigureToolsetImport) {
- $configureToolsetScript = Join-Path $EngRoot 'configure-toolset.ps1'
- if (Test-Path $configureToolsetScript) {
- . $configureToolsetScript
- if ((Test-Path variable:failOnConfigureToolsetError) -And $failOnConfigureToolsetError) {
- if ((Test-Path variable:LastExitCode) -And ($LastExitCode -ne 0)) {
- Write-PipelineTelemetryError -Category 'Build' -Message 'configure-toolset.ps1 returned a non-zero exit code'
- ExitWithExitCode $LastExitCode
- }
- }
- }
-}
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
index e071af4ee49..94965a8fd2a 100755
--- a/eng/common/tools.sh
+++ b/eng/common/tools.sh
@@ -81,7 +81,7 @@ function ReadGlobalVersion {
local pattern="\"$key\" *: *\"(.*)\""
if [[ ! $line =~ $pattern ]]; then
- Write-PipelineTelemetryError -category 'Build' "Error: Cannot find \"$key\" in $global_json_file"
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Error: Cannot find \"$key\" in $global_json_file"
ExitWithExitCode 1
fi
@@ -152,6 +152,15 @@ function InitializeDotNetCli {
# build steps from using anything other than what we've downloaded.
Write-PipelinePrependPath -path "$dotnet_root"
+ # Work around issues with Azure Artifacts credential provider
+ # https://github.com/dotnet/arcade/issues/3932
+ if [[ "$ci" == true ]]; then
+ export NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS=20
+ export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20
+ Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20"
+ Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20"
+ fi
+
Write-PipelineSetVariable -name "DOTNET_MULTILEVEL_LOOKUP" -value "0"
Write-PipelineSetVariable -name "DOTNET_SKIP_FIRST_TIME_EXPERIENCE" -value "1"
@@ -274,9 +283,6 @@ function GetNuGetPackageCachePath {
}
function InitializeNativeTools() {
- if [[ -n "${DisableNativeToolsetInstalls:-}" ]]; then
- return
- fi
if grep -Fq "native-tools" $global_json_file
then
local nativeArgs=""
@@ -326,7 +332,7 @@ function InitializeToolset {
local toolset_build_proj=`cat "$toolset_location_file"`
if [[ ! -a "$toolset_build_proj" ]]; then
- Write-PipelineTelemetryError -category 'Build' "Invalid toolset path: $toolset_build_proj"
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Invalid toolset path: $toolset_build_proj"
ExitWithExitCode 3
fi
@@ -357,12 +363,7 @@ function MSBuild {
# Work around issues with Azure Artifacts credential provider
# https://github.com/dotnet/arcade/issues/3932
if [[ "$ci" == true ]]; then
- "$_InitializeBuildTool" nuget locals http-cache -c
-
- export NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS=20
- export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20
- Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20"
- Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20"
+ dotnet nuget locals http-cache -c
fi
local toolset_dir="${_InitializeToolset%/*}"
@@ -376,12 +377,12 @@ function MSBuild {
function MSBuild-Core {
if [[ "$ci" == true ]]; then
if [[ "$binary_log" != true ]]; then
- Write-PipelineTelemetryError -category 'Build' "Binary log must be enabled in CI build."
+ Write-PipelineTaskError "Binary log must be enabled in CI build."
ExitWithExitCode 1
fi
if [[ "$node_reuse" == true ]]; then
- Write-PipelineTelemetryError -category 'Build' "Node reuse must be disabled in CI build."
+ Write-PipelineTaskError "Node reuse must be disabled in CI build."
ExitWithExitCode 1
fi
fi
@@ -395,7 +396,7 @@ function MSBuild-Core {
"$_InitializeBuildTool" "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@" || {
local exit_code=$?
- Write-PipelineTelemetryError -category 'Build' "Build failed (exit code '$exit_code')."
+ Write-PipelineTaskError "Build failed (exit code '$exit_code')."
ExitWithExitCode $exit_code
}
}
@@ -436,18 +437,3 @@ Write-PipelineSetVariable -name "Artifacts.Toolset" -value "$toolset_dir"
Write-PipelineSetVariable -name "Artifacts.Log" -value "$log_dir"
Write-PipelineSetVariable -name "Temp" -value "$temp_dir"
Write-PipelineSetVariable -name "TMP" -value "$temp_dir"
-
-# Import custom tools configuration, if present in the repo.
-if [ -z "${disable_configure_toolset_import:-}" ]; then
- configure_toolset_script="$eng_root/configure-toolset.sh"
- if [[ -a "$configure_toolset_script" ]]; then
- . "$configure_toolset_script"
- fi
-fi
-
-# TODO: https://github.com/dotnet/arcade/issues/1468
-# Temporary workaround to avoid breaking change.
-# Remove once repos are updated.
-if [[ -n "${useInstalledDotNetCli:-}" ]]; then
- use_installed_dotnet_cli="$useInstalledDotNetCli"
-fi
diff --git a/fcs/build.fsx b/fcs/build.fsx
index 1c2528dd2e0..af21298e7ab 100644
--- a/fcs/build.fsx
+++ b/fcs/build.fsx
@@ -66,16 +66,16 @@ Target.create "Build" (fun _ ->
runDotnet __SOURCE_DIRECTORY__ "build" "../src/buildtools/buildtools.proj -v n -c Proto"
let fslexPath = __SOURCE_DIRECTORY__ + "/../artifacts/bin/fslex/Proto/netcoreapp2.1/fslex.dll"
let fsyaccPath = __SOURCE_DIRECTORY__ + "/../artifacts/bin/fsyacc/Proto/netcoreapp2.1/fsyacc.dll"
- runDotnet __SOURCE_DIRECTORY__ "build" (sprintf "FSharp.Compiler.Service.sln -v n -c Release /p:FsLexPath=%s /p:FsYaccPath=%s" fslexPath fsyaccPath)
+ runDotnet __SOURCE_DIRECTORY__ "build" (sprintf "FSharp.Compiler.Service.sln -nodereuse:false -v n -c Release /p:DisableCompilerRedirection=true /p:FsLexPath=%s /p:FsYaccPath=%s" fslexPath fsyaccPath)
)
Target.create "Test" (fun _ ->
// This project file is used for the netcoreapp2.0 tests to work out reference sets
- runDotnet __SOURCE_DIRECTORY__ "build" "../tests/projects/Sample_NETCoreSDK_FSharp_Library_netstandard2_0/Sample_NETCoreSDK_FSharp_Library_netstandard2_0.fsproj -v n /restore /p:DisableCompilerRedirection=true"
+ runDotnet __SOURCE_DIRECTORY__ "build" "../tests/projects/Sample_NETCoreSDK_FSharp_Library_netstandard2_0/Sample_NETCoreSDK_FSharp_Library_netstandard2_0.fsproj -nodereuse:false -v n /restore /p:DisableCompilerRedirection=true"
// Now run the tests
let logFilePath = Path.Combine(__SOURCE_DIRECTORY__, "..", "artifacts", "TestResults", "Release", "FSharp.Compiler.Service.Test.xml")
- runDotnet __SOURCE_DIRECTORY__ "test" (sprintf "FSharp.Compiler.Service.Tests/FSharp.Compiler.Service.Tests.fsproj --no-restore --no-build -v n -c Release --test-adapter-path . --logger \"nunit;LogFilePath=%s\"" logFilePath)
+ runDotnet __SOURCE_DIRECTORY__ "test" (sprintf "FSharp.Compiler.Service.Tests/FSharp.Compiler.Service.Tests.fsproj --no-restore --no-build -nodereuse:false -v n -c Release --test-adapter-path . --logger \"nunit;LogFilePath=%s\"" logFilePath)
)
Target.create "NuGet" (fun _ ->
diff --git a/global.json b/global.json
index 699ccb65f85..f12de763877 100644
--- a/global.json
+++ b/global.json
@@ -10,7 +10,7 @@
}
},
"msbuild-sdks": {
- "Microsoft.DotNet.Arcade.Sdk": "5.0.0-beta.19627.1",
+ "Microsoft.DotNet.Arcade.Sdk": "1.0.0-beta.19616.5",
"Microsoft.DotNet.Helix.Sdk": "2.0.0-beta.19069.2"
}
}
diff --git a/src/absil/ilascii.fs b/src/absil/ilascii.fs
index 4bd48cb59b3..e5b7782ba7e 100644
--- a/src/absil/ilascii.fs
+++ b/src/absil/ilascii.fs
@@ -9,7 +9,7 @@ open FSharp.Compiler.AbstractIL.IL
// set to the proper value at CompileOps.fs (BuildFrameworkTcImports)
// Only relevant when compiling FSharp.Core.dll
-let parseILGlobals = ref EcmaMscorlibILGlobals
+let mutable parseILGlobals = EcmaMscorlibILGlobals
/// Table of parsing and pretty printing data for instructions.
let noArgInstrs =
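The ilascii.fs change above is representative of a pattern applied throughout the F# sources in this patch: module-level `'a ref` cells that were read with `!` and written with `:=` become `let mutable` bindings that are read directly and assigned with `<-`. A minimal sketch of the two styles, using illustrative names rather than code from the repo:

```fsharp
// Before: a ref cell is a heap-allocated wrapper that must be dereferenced with !.
let counterCell = ref 0
counterCell := !counterCell + 1
printfn "cell counter = %d" !counterCell

// After: a mutable binding is read like any ordinary value and assigned with <-.
let mutable counter = 0
counter <- counter + 1
printfn "mutable counter = %d" counter
```

Besides reading more directly, the mutable form avoids the extra allocation and indirection of the `FSharpRef<'T>` wrapper.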
diff --git a/src/absil/ilascii.fsi b/src/absil/ilascii.fsi
index 3a8543e4e1c..67a5c9eb4b1 100644
--- a/src/absil/ilascii.fsi
+++ b/src/absil/ilascii.fsi
@@ -14,7 +14,7 @@ open FSharp.Compiler.AbstractIL.IL
// IL Parser state - must be initialized before parsing a module
// --------------------------------------------------------------------
-val parseILGlobals: ILGlobals ref
+val mutable parseILGlobals: ILGlobals
// --------------------------------------------------------------------
// IL Lexer and pretty-printer tables
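Because `parseILGlobals` is now a plain mutable binding rather than a ref cell, the signature file exposes it with `val mutable`. A small sketch of how an implementation/signature pair lines up under this change (hypothetical module and value, not the repo's code):

```fsharp
// Implementation file, e.g. Example.fs
module Example

let mutable currentThreshold : int = 100

// The matching signature file, Example.fsi, would declare the same binding as:
//
//     module Example
//     val mutable currentThreshold: int
```

Consumers then read `Example.currentThreshold` like any other value and assign it with `<-`, with no `!` or `:=` involved.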
diff --git a/src/absil/ildiag.fs b/src/absil/ildiag.fs
index 1cd20ad8750..d43bdf8dca4 100644
--- a/src/absil/ildiag.fs
+++ b/src/absil/ildiag.fs
@@ -5,18 +5,18 @@
module internal FSharp.Compiler.AbstractIL.Diagnostics
-let diagnosticsLog = ref (Some stdout)
+let mutable diagnosticsLog = Some stdout
-let setDiagnosticsChannel s = diagnosticsLog := s
+let setDiagnosticsChannel s = diagnosticsLog <- s
-let dflushn () = match !diagnosticsLog with None -> () | Some d -> d.WriteLine(); d.Flush()
-let dflush () = match !diagnosticsLog with None -> () | Some d -> d.Flush()
+let dflushn () = match diagnosticsLog with None -> () | Some d -> d.WriteLine(); d.Flush()
+let dflush () = match diagnosticsLog with None -> () | Some d -> d.Flush()
let dprintn (s:string) =
- match !diagnosticsLog with None -> () | Some d -> d.Write s; d.Write "\n"; dflush()
+ match diagnosticsLog with None -> () | Some d -> d.Write s; d.Write "\n"; dflush()
let dprintf (fmt: Format<_,_,_,_>) =
- Printf.kfprintf dflush (match !diagnosticsLog with None -> System.IO.TextWriter.Null | Some d -> d) fmt
+ Printf.kfprintf dflush (match diagnosticsLog with None -> System.IO.TextWriter.Null | Some d -> d) fmt
let dprintfn (fmt: Format<_,_,_,_>) =
- Printf.kfprintf dflushn (match !diagnosticsLog with None -> System.IO.TextWriter.Null | Some d -> d) fmt
+ Printf.kfprintf dflushn (match diagnosticsLog with None -> System.IO.TextWriter.Null | Some d -> d) fmt
diff --git a/src/absil/illib.fs b/src/absil/illib.fs
index 9a84e94e34a..33e0566a6c2 100644
--- a/src/absil/illib.fs
+++ b/src/absil/illib.fs
@@ -47,15 +47,15 @@ let LOH_SIZE_THRESHOLD_BYTES = 80_000
// Library: ReportTime
//---------------------------------------------------------------------
let reportTime =
- let tFirst = ref None
- let tPrev = ref None
+    let mutable tFirst = None
+ let mutable tPrev = None
fun showTimes descr ->
if showTimes then
let t = Process.GetCurrentProcess().UserProcessorTime.TotalSeconds
- let prev = match !tPrev with None -> 0.0 | Some t -> t
- let first = match !tFirst with None -> (tFirst := Some t; t) | Some t -> t
+ let prev = match tPrev with None -> 0.0 | Some t -> t
+ let first = match tFirst with None -> (tFirst <- Some t; t) | Some t -> t
printf "ilwrite: TIME %10.3f (total) %10.3f (delta) - %s\n" (t - first) (t - prev) descr
- tPrev := Some t
+ tPrev <- Some t
//-------------------------------------------------------------------------
// Library: projections
@@ -573,10 +573,10 @@ module String =
let getLines (str: string) =
use reader = new StringReader(str)
[|
- let line = ref (reader.ReadLine())
- while not (isNull !line) do
- yield !line
- line := reader.ReadLine()
+ let mutable line = reader.ReadLine()
+ while not (isNull line) do
+ yield line
+ line <- reader.ReadLine()
if str.EndsWithOrdinal("\n") then
// last trailing space not returned
// http://stackoverflow.com/questions/19365404/stringreader-omits-trailing-linebreak
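The reportTime and String.getLines rewrites above show the local-variable version of the same idea: a ref cell used as a loop variable or accumulator becomes a `let mutable` local. A self-contained sketch of that loop shape, assuming only the standard library:

```fsharp
open System.IO

// Collect the lines of a string using a mutable loop variable and a mutable
// accumulator, then restore the original order with List.rev.
let readAllLines (text: string) =
    use reader = new StringReader(text)
    let mutable acc = []
    let mutable line = reader.ReadLine()
    while not (isNull line) do
        acc <- line :: acc
        line <- reader.ReadLine()
    List.rev acc

// readAllLines "a\nb\nc" evaluates to ["a"; "b"; "c"]
```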
diff --git a/src/absil/ilpars.fsy b/src/absil/ilpars.fsy
index 849c5f9fd13..e831677e50a 100644
--- a/src/absil/ilpars.fsy
+++ b/src/absil/ilpars.fsy
@@ -33,7 +33,7 @@ let resolveCurrentMethodSpecScope obj =
let findSystemRuntimeAssemblyRef() =
- match (!parseILGlobals).primaryAssemblyScopeRef with
+ match parseILGlobals.primaryAssemblyScopeRef with
| ILScopeRef.Assembly aref -> aref
| _ -> pfailwith "systemRuntimeScopeRef not set to valid assembly reference in parseILGlobals"
@@ -235,9 +235,9 @@ callKind:
*---------------------------------------------*/
typ: STRING
- { noMethodSpecScope (!parseILGlobals).typ_String }
+ { noMethodSpecScope parseILGlobals.typ_String }
| OBJECT
- { noMethodSpecScope (!parseILGlobals).typ_Object }
+ { noMethodSpecScope parseILGlobals.typ_Object }
| CLASS typeNameInst
{ resolveMethodSpecScopeThen $2 (fun tspec ->
noMethodSpecScope (mkILBoxedType tspec)) }
@@ -256,45 +256,45 @@ typ: STRING
| typ STAR
{ resolveMethodSpecScopeThen $1 (fun ty -> noMethodSpecScope (ILType.Ptr ty)) }
| CHAR
- { noMethodSpecScope (!parseILGlobals).typ_Char }
+ { noMethodSpecScope parseILGlobals.typ_Char }
| VOID
{ noMethodSpecScope ILType.Void }
| BOOL
- { noMethodSpecScope (!parseILGlobals).typ_Bool }
+ { noMethodSpecScope parseILGlobals.typ_Bool }
| INT8
- { noMethodSpecScope (!parseILGlobals).typ_SByte }
+ { noMethodSpecScope parseILGlobals.typ_SByte }
| INT16
- { noMethodSpecScope (!parseILGlobals).typ_Int16 }
+ { noMethodSpecScope parseILGlobals.typ_Int16 }
| INT32
- { noMethodSpecScope (!parseILGlobals).typ_Int32 }
+ { noMethodSpecScope parseILGlobals.typ_Int32 }
| INT64
- { noMethodSpecScope (!parseILGlobals).typ_Int64 }
+ { noMethodSpecScope parseILGlobals.typ_Int64 }
| FLOAT32
- { noMethodSpecScope (!parseILGlobals).typ_Single }
+ { noMethodSpecScope parseILGlobals.typ_Single }
| FLOAT64
- { noMethodSpecScope (!parseILGlobals).typ_Double }
+ { noMethodSpecScope parseILGlobals.typ_Double }
| UNSIGNED INT8
- { noMethodSpecScope (!parseILGlobals).typ_Byte }
+ { noMethodSpecScope parseILGlobals.typ_Byte }
| UNSIGNED INT16
- { noMethodSpecScope (!parseILGlobals).typ_UInt16 }
+ { noMethodSpecScope parseILGlobals.typ_UInt16 }
| UNSIGNED INT32
- { noMethodSpecScope (!parseILGlobals).typ_UInt32 }
+ { noMethodSpecScope parseILGlobals.typ_UInt32 }
| UNSIGNED INT64
- { noMethodSpecScope (!parseILGlobals).typ_UInt64 }
+ { noMethodSpecScope parseILGlobals.typ_UInt64 }
| UINT8
- { noMethodSpecScope (!parseILGlobals).typ_Byte }
+ { noMethodSpecScope parseILGlobals.typ_Byte }
| UINT16
- { noMethodSpecScope (!parseILGlobals).typ_UInt16 }
+ { noMethodSpecScope parseILGlobals.typ_UInt16 }
| UINT32
- { noMethodSpecScope (!parseILGlobals).typ_UInt32 }
+ { noMethodSpecScope parseILGlobals.typ_UInt32 }
| UINT64
- { noMethodSpecScope (!parseILGlobals).typ_UInt64 }
+ { noMethodSpecScope parseILGlobals.typ_UInt64 }
| NATIVE INT
- { noMethodSpecScope (!parseILGlobals).typ_IntPtr }
+ { noMethodSpecScope parseILGlobals.typ_IntPtr }
| NATIVE UNSIGNED INT
- { noMethodSpecScope (!parseILGlobals).typ_UIntPtr }
+ { noMethodSpecScope parseILGlobals.typ_UIntPtr }
| NATIVE UINT
- { noMethodSpecScope (!parseILGlobals).typ_UIntPtr }
+ { noMethodSpecScope parseILGlobals.typ_UIntPtr }
| BANG int32
{ noMethodSpecScope (ILType.TypeVar (uint16 ( $2))) }
diff --git a/src/absil/ilread.fs b/src/absil/ilread.fs
index fed858c4d50..aec47ac3be4 100644
--- a/src/absil/ilread.fs
+++ b/src/absil/ilread.fs
@@ -522,8 +522,8 @@ let instrs () =
// The tables are delayed to avoid building them unnecessarily at startup
// Many applications of AbsIL (e.g. a compiler) don't need to read instructions.
-let oneByteInstrs = ref None
-let twoByteInstrs = ref None
+let mutable oneByteInstrs = None
+let mutable twoByteInstrs = None
let fillInstrs () =
let oneByteInstrTable = Array.create 256 I_invalid_instr
let twoByteInstrTable = Array.create 256 I_invalid_instr
@@ -542,16 +542,16 @@ let fillInstrs () =
oneByteInstrTable.[i] <- f
List.iter addInstr (instrs())
List.iter (fun (x, mk) -> addInstr (x, I_none_instr (noPrefixes mk))) (noArgInstrs.Force())
- oneByteInstrs := Some oneByteInstrTable
- twoByteInstrs := Some twoByteInstrTable
+ oneByteInstrs <- Some oneByteInstrTable
+ twoByteInstrs <- Some twoByteInstrTable
let rec getOneByteInstr i =
- match !oneByteInstrs with
+ match oneByteInstrs with
| None -> fillInstrs(); getOneByteInstr i
| Some t -> t.[i]
let rec getTwoByteInstr i =
- match !twoByteInstrs with
+ match twoByteInstrs with
| None -> fillInstrs(); getTwoByteInstr i
| Some t -> t.[i]
@@ -680,20 +680,20 @@ type GenericParamsIdx = GenericParamsIdx of int * TypeOrMethodDefTag * int
let mkCacheInt32 lowMem _inbase _nm _sz =
if lowMem then (fun f x -> f x) else
- let cache = ref null
- let count = ref 0
+ let mutable cache = null
+ let mutable count = 0
#if STATISTICS
addReport (fun oc -> if !count <> 0 then oc.WriteLine ((_inbase + string !count + " "+ _nm + " cache hits"): string))
#endif
fun f (idx: int32) ->
let cache =
- match !cache with
- | null -> cache := new Dictionary(11)
+ match cache with
+ | null -> cache <- new Dictionary(11)
| _ -> ()
- !cache
+ cache
match cache.TryGetValue idx with
| true, res ->
- incr count
+ count <- count + 1
res
| _ ->
let res = f idx
@@ -702,20 +702,20 @@ let mkCacheInt32 lowMem _inbase _nm _sz =
let mkCacheGeneric lowMem _inbase _nm _sz =
if lowMem then (fun f x -> f x) else
- let cache = ref null
- let count = ref 0
+ let mutable cache = null
+ let mutable count = 0
#if STATISTICS
addReport (fun oc -> if !count <> 0 then oc.WriteLine ((_inbase + string !count + " " + _nm + " cache hits"): string))
#endif
fun f (idx :'T) ->
let cache =
- match !cache with
- | null -> cache := new Dictionary<_, _>(11 (* sz: int *) )
+ match cache with
+ | null -> cache <- new Dictionary<_, _>(11 (* sz: int *) )
| _ -> ()
- !cache
+ cache
match cache.TryGetValue idx with
| true, v ->
- incr count
+ count <- count + 1
v
| _ ->
let res = f idx
@@ -804,12 +804,12 @@ let seekReadIndexedRows (numRows, rowReader, keyFunc, keyComparer, binaryChop, r
res
else
- let res = ref []
+ let mutable res = []
for i = 1 to numRows do
let rowinfo = rowReader i
if keyComparer (keyFunc rowinfo) = 0 then
- res := rowConverter rowinfo :: !res
- List.rev !res
+ res <- rowConverter rowinfo :: res
+ List.rev res
let seekReadOptionalIndexedRow info =
@@ -1352,26 +1352,26 @@ let getDataEndPointsDelayed (pectxt: PEReader) ctxtH =
let (ctxt: ILMetadataReader) = getHole ctxtH
let mdv = ctxt.mdfile.GetView()
let dataStartPoints =
- let res = ref []
+ let mutable res = []
for i = 1 to ctxt.getNumRows TableNames.FieldRVA do
let rva, _fidx = seekReadFieldRVARow ctxt mdv i
- res := ("field", rva) :: !res
+ res <- ("field", rva) :: res
for i = 1 to ctxt.getNumRows TableNames.ManifestResource do
let (offset, _, _, TaggedIndex(_tag, idx)) = seekReadManifestResourceRow ctxt mdv i
if idx = 0 then
let rva = pectxt.resourcesAddr + offset
- res := ("manifest resource", rva) :: !res
- !res
+ res <- ("manifest resource", rva) :: res
+ res
if isNil dataStartPoints then []
else
let methodRVAs =
- let res = ref []
+ let mutable res = []
for i = 1 to ctxt.getNumRows TableNames.Method do
let (rva, _, _, nameIdx, _, _) = seekReadMethodRow ctxt mdv i
if rva <> 0 then
let nm = readStringHeap ctxt nameIdx
- res := (nm, rva) :: !res
- !res
+ res <- (nm, rva) :: res
+ res
([ pectxt.textSegmentPhysicalLoc + pectxt.textSegmentPhysicalSize
pectxt.dataSegmentPhysicalLoc + pectxt.dataSegmentPhysicalSize ]
@
@@ -2196,20 +2196,20 @@ and seekReadMethod (ctxt: ILMetadataReader) mdv numtypars (idx: int) =
and seekReadParams (ctxt: ILMetadataReader) mdv (retty, argtys) pidx1 pidx2 =
- let retRes = ref (mkILReturn retty)
+ let mutable retRes = mkILReturn retty
let paramsRes = argtys |> List.toArray |> Array.map mkILParamAnon
for i = pidx1 to pidx2 - 1 do
- seekReadParamExtras ctxt mdv (retRes, paramsRes) i
- !retRes, List.ofArray paramsRes
+ seekReadParamExtras ctxt mdv (&retRes, paramsRes) i
+ retRes, List.ofArray paramsRes
-and seekReadParamExtras (ctxt: ILMetadataReader) mdv (retRes, paramsRes) (idx: int) =
+and seekReadParamExtras (ctxt: ILMetadataReader) mdv (retRes: byref<ILReturn>, paramsRes) (idx: int) =
let (flags, seq, nameIdx) = seekReadParamRow ctxt mdv idx
let inOutMasked = (flags &&& 0x00FF)
let hasMarshal = (flags &&& 0x2000) <> 0x0
let hasDefault = (flags &&& 0x1000) <> 0x0
let fmReader idx = seekReadIndexedRow (ctxt.getNumRows TableNames.FieldMarshal, seekReadFieldMarshalRow ctxt mdv, fst, hfmCompare idx, isSorted ctxt TableNames.FieldMarshal, (snd >> readBlobHeapAsNativeType ctxt))
if seq = 0 then
- retRes := { !retRes with
+ retRes <- { retRes with
Marshal=(if hasMarshal then Some (fmReader (TaggedIndex(hfm_ParamDef, idx))) else None)
CustomAttrsStored = ctxt.customAttrsReader_ParamDef
MetadataIndex = idx}
@@ -2481,37 +2481,37 @@ and seekReadTopCode (ctxt: ILMetadataReader) pev mdv numtypars (sz: int) start s
ilOffsetsOfLabels.[lab] <- ilOffset
let ibuf = new ResizeArray<_>(sz/2)
- let curr = ref 0
+ let mutable curr = 0
let prefixes = { al=Aligned; tl= Normalcall; vol= Nonvolatile;ro=NormalAddress;constrained=None }
- let lastb = ref 0x0
- let lastb2 = ref 0x0
- let b = ref 0x0
+ let mutable lastb = 0x0
+ let mutable lastb2 = 0x0
+ let mutable b = 0x0
let get () =
- lastb := seekReadByteAsInt32 pev (start + (!curr))
- incr curr
- b :=
- if !lastb = 0xfe && !curr < sz then
- lastb2 := seekReadByteAsInt32 pev (start + (!curr))
- incr curr
- !lastb2
+ lastb <- seekReadByteAsInt32 pev (start + curr)
+ curr <- curr + 1
+ b <-
+ if lastb = 0xfe && curr < sz then
+ lastb2 <- seekReadByteAsInt32 pev (start + curr)
+ curr <- curr + 1
+ lastb2
else
- !lastb
+ lastb
- let seqPointsRemaining = ref seqpoints
+ let mutable seqPointsRemaining = seqpoints
- while !curr < sz do
+ while curr < sz do
// registering "+string !curr+" as start of an instruction")
- markAsInstructionStart !curr ibuf.Count
+ markAsInstructionStart curr ibuf.Count
// Insert any sequence points into the instruction sequence
while
- (match !seqPointsRemaining with
- | (i, _tag) :: _rest when i <= !curr -> true
+ (match seqPointsRemaining with
+ | (i, _tag) :: _rest when i <= curr -> true
| _ -> false)
do
// Emitting one sequence point
- let (_, tag) = List.head !seqPointsRemaining
- seqPointsRemaining := List.tail !seqPointsRemaining
+ let (_, tag) = List.head seqPointsRemaining
+ seqPointsRemaining <- List.tail seqPointsRemaining
ibuf.Add (I_seqpoint tag)
// Read the prefixes. Leave lastb and lastb2 holding the instruction byte(s)
@@ -2522,27 +2522,27 @@ and seekReadTopCode (ctxt: ILMetadataReader) pev mdv numtypars (sz: int) start s
prefixes.ro<-NormalAddress
prefixes.constrained<-None
get ()
- while !curr < sz &&
- !lastb = 0xfe &&
- (!b = (i_constrained &&& 0xff) ||
- !b = (i_readonly &&& 0xff) ||
- !b = (i_unaligned &&& 0xff) ||
- !b = (i_volatile &&& 0xff) ||
- !b = (i_tail &&& 0xff)) do
+ while curr < sz &&
+ lastb = 0xfe &&
+ (b = (i_constrained &&& 0xff) ||
+ b = (i_readonly &&& 0xff) ||
+ b = (i_unaligned &&& 0xff) ||
+ b = (i_volatile &&& 0xff) ||
+ b = (i_tail &&& 0xff)) do
begin
- if !b = (i_unaligned &&& 0xff) then
- let unal = seekReadByteAsInt32 pev (start + (!curr))
- incr curr
+ if b = (i_unaligned &&& 0xff) then
+ let unal = seekReadByteAsInt32 pev (start + curr)
+ curr <- curr + 1
prefixes.al <-
if unal = 0x1 then Unaligned1
elif unal = 0x2 then Unaligned2
elif unal = 0x4 then Unaligned4
else (dprintn "bad alignment for unaligned"; Aligned)
- elif !b = (i_volatile &&& 0xff) then prefixes.vol <- Volatile
- elif !b = (i_readonly &&& 0xff) then prefixes.ro <- ReadonlyAddress
- elif !b = (i_constrained &&& 0xff) then
- let uncoded = seekReadUncodedToken pev (start + (!curr))
- curr := !curr + 4
+ elif b = (i_volatile &&& 0xff) then prefixes.vol <- Volatile
+ elif b = (i_readonly &&& 0xff) then prefixes.ro <- ReadonlyAddress
+ elif b = (i_constrained &&& 0xff) then
+ let uncoded = seekReadUncodedToken pev (start + curr)
+ curr <- curr + 4
let ty = seekReadTypeDefOrRef ctxt numtypars AsObject [] (uncodedTokenToTypeDefOrRefOrSpec uncoded)
prefixes.constrained <- Some ty
else prefixes.tl <- Tailcall
@@ -2552,45 +2552,45 @@ and seekReadTopCode (ctxt: ILMetadataReader) pev mdv numtypars (sz: int) start s
// data for instruction begins at "+string !curr
// Read and decode the instruction
- if (!curr <= sz) then
+ if (curr <= sz) then
let idecoder =
- if !lastb = 0xfe then getTwoByteInstr ( !lastb2)
- else getOneByteInstr ( !lastb)
+ if lastb = 0xfe then getTwoByteInstr lastb2
+ else getOneByteInstr lastb
let instr =
match idecoder with
| I_u16_u8_instr f ->
- let x = seekReadByte pev (start + (!curr)) |> uint16
- curr := !curr + 1
+ let x = seekReadByte pev (start + curr) |> uint16
+ curr <- curr + 1
f prefixes x
| I_u16_u16_instr f ->
- let x = seekReadUInt16 pev (start + (!curr))
- curr := !curr + 2
+ let x = seekReadUInt16 pev (start + curr)
+ curr <- curr + 2
f prefixes x
| I_none_instr f ->
f prefixes
| I_i64_instr f ->
- let x = seekReadInt64 pev (start + (!curr))
- curr := !curr + 8
+ let x = seekReadInt64 pev (start + curr)
+ curr <- curr + 8
f prefixes x
| I_i32_i8_instr f ->
- let x = seekReadSByte pev (start + (!curr)) |> int32
- curr := !curr + 1
+ let x = seekReadSByte pev (start + curr) |> int32
+ curr <- curr + 1
f prefixes x
| I_i32_i32_instr f ->
- let x = seekReadInt32 pev (start + (!curr))
- curr := !curr + 4
+ let x = seekReadInt32 pev (start + curr)
+ curr <- curr + 4
f prefixes x
| I_r4_instr f ->
- let x = seekReadSingle pev (start + (!curr))
- curr := !curr + 4
+ let x = seekReadSingle pev (start + curr)
+ curr <- curr + 4
f prefixes x
| I_r8_instr f ->
- let x = seekReadDouble pev (start + (!curr))
- curr := !curr + 8
+ let x = seekReadDouble pev (start + curr)
+ curr <- curr + 8
f prefixes x
| I_field_instr f ->
- let (tab, tok) = seekReadUncodedToken pev (start + (!curr))
- curr := !curr + 4
+ let (tab, tok) = seekReadUncodedToken pev (start + curr)
+ curr <- curr + 4
let fspec =
if tab = TableNames.Field then
seekReadFieldDefAsFieldSpec ctxt tok
@@ -2601,8 +2601,8 @@ and seekReadTopCode (ctxt: ILMetadataReader) pev mdv numtypars (sz: int) start s
| I_method_instr f ->
// method instruction, curr = "+string !curr
- let (tab, idx) = seekReadUncodedToken pev (start + (!curr))
- curr := !curr + 4
+ let (tab, idx) = seekReadUncodedToken pev (start + curr)
+ curr <- curr + 4
let (VarArgMethodData(enclTy, cc, nm, argtys, varargs, retty, minst)) =
if tab = TableNames.Method then
seekReadMethodDefOrRef ctxt numtypars (TaggedIndex(mdor_MethodDef, idx))
@@ -2623,42 +2623,42 @@ and seekReadTopCode (ctxt: ILMetadataReader) pev mdv numtypars (sz: int) start s
let mspec = mkILMethSpecInTy (enclTy, cc, nm, argtys, retty, minst)
f prefixes (mspec, varargs)
| I_type_instr f ->
- let uncoded = seekReadUncodedToken pev (start + (!curr))
- curr := !curr + 4
+ let uncoded = seekReadUncodedToken pev (start + curr)
+ curr <- curr + 4
let ty = seekReadTypeDefOrRef ctxt numtypars AsObject [] (uncodedTokenToTypeDefOrRefOrSpec uncoded)
f prefixes ty
| I_string_instr f ->
- let (tab, idx) = seekReadUncodedToken pev (start + (!curr))
- curr := !curr + 4
+ let (tab, idx) = seekReadUncodedToken pev (start + curr)
+ curr <- curr + 4
if tab <> TableNames.UserStrings then dprintn "warning: bad table in user string for ldstr"
f prefixes (readUserStringHeap ctxt idx)
| I_conditional_i32_instr f ->
- let offsDest = (seekReadInt32 pev (start + (!curr)))
- curr := !curr + 4
- let dest = !curr + offsDest
+ let offsDest = (seekReadInt32 pev (start + curr))
+ curr <- curr + 4
+ let dest = curr + offsDest
f prefixes (rawToLabel dest)
| I_conditional_i8_instr f ->
- let offsDest = int (seekReadSByte pev (start + (!curr)))
- curr := !curr + 1
- let dest = !curr + offsDest
+ let offsDest = int (seekReadSByte pev (start + curr))
+ curr <- curr + 1
+ let dest = curr + offsDest
f prefixes (rawToLabel dest)
| I_unconditional_i32_instr f ->
- let offsDest = (seekReadInt32 pev (start + (!curr)))
- curr := !curr + 4
- let dest = !curr + offsDest
+ let offsDest = (seekReadInt32 pev (start + curr))
+ curr <- curr + 4
+ let dest = curr + offsDest
f prefixes (rawToLabel dest)
| I_unconditional_i8_instr f ->
- let offsDest = int (seekReadSByte pev (start + (!curr)))
- curr := !curr + 1
- let dest = !curr + offsDest
+ let offsDest = int (seekReadSByte pev (start + curr))
+ curr <- curr + 1
+ let dest = curr + offsDest
f prefixes (rawToLabel dest)
| I_invalid_instr ->
- dprintn ("invalid instruction: "+string !lastb+ (if !lastb = 0xfe then ", "+string !lastb2 else ""))
+ dprintn ("invalid instruction: "+string lastb + (if lastb = 0xfe then ", "+ string lastb2 else ""))
I_ret
| I_tok_instr f ->
- let (tab, idx) = seekReadUncodedToken pev (start + (!curr))
- curr := !curr + 4
+ let (tab, idx) = seekReadUncodedToken pev (start + curr)
+ curr <- curr + 4
(* REVIEW: this incorrectly labels all MemberRef tokens as ILMethod's: we should go look at the MemberRef sig to determine if it is a field or method *)
let token_info =
if tab = TableNames.Method || tab = TableNames.MemberRef (* REVIEW: generics or tab = TableNames.MethodSpec *) then
@@ -2671,26 +2671,26 @@ and seekReadTopCode (ctxt: ILMetadataReader) pev mdv numtypars (sz: int) start s
else failwith "bad token for ldtoken"
f prefixes token_info
| I_sig_instr f ->
- let (tab, idx) = seekReadUncodedToken pev (start + (!curr))
- curr := !curr + 4
+ let (tab, idx) = seekReadUncodedToken pev (start + curr)
+ curr <- curr + 4
if tab <> TableNames.StandAloneSig then dprintn "strange table for callsig token"
let generic, _genarity, cc, retty, argtys, varargs = readBlobHeapAsMethodSig ctxt numtypars (seekReadStandAloneSigRow ctxt mdv idx)
if generic then failwith "bad image: a generic method signature is begin used at a calli instruction"
f prefixes (mkILCallSig (cc, argtys, retty), varargs)
| I_switch_instr f ->
- let n = (seekReadInt32 pev (start + (!curr)))
- curr := !curr + 4
+ let n = (seekReadInt32 pev (start + curr))
+          curr <- curr + 4
let offsets =
List.init n (fun _ ->
- let i = (seekReadInt32 pev (start + (!curr)))
- curr := !curr + 4
+ let i = (seekReadInt32 pev (start + curr))
+ curr <- curr + 4
i)
- let dests = List.map (fun offs -> rawToLabel (!curr + offs)) offsets
+ let dests = List.map (fun offs -> rawToLabel (curr + offs)) offsets
f prefixes dests
ibuf.Add instr
done
// Finished reading instructions - mark the end of the instruction stream in case the PDB information refers to it.
- markAsInstructionStart !curr ibuf.Count
+ markAsInstructionStart curr ibuf.Count
// Build the function that maps from raw labels (offsets into the bytecode stream) to indexes in the AbsIL instruction stream
let lab2pc = ilOffsetsOfLabels
@@ -2823,11 +2823,11 @@ and seekReadMethodRVA (pectxt: PEReader) (ctxt: ILMetadataReader) (idx, nm, _int
// Read all the sections that follow the method body.
// These contain the exception clauses.
- let nextSectionBase = ref (align 4 (codeBase + codeSize))
- let moreSections = ref hasMoreSections
- let seh = ref []
- while !moreSections do
- let sectionBase = !nextSectionBase
+ let mutable nextSectionBase = align 4 (codeBase + codeSize)
+ let mutable moreSections = hasMoreSections
+ let mutable seh = []
+ while moreSections do
+ let sectionBase = nextSectionBase
let sectionFlag = seekReadByte pev sectionBase
// fat format for "+nm+", sectionFlag = " + string sectionFlag)
let sectionSize, clauses =
@@ -2907,16 +2907,16 @@ and seekReadMethodRVA (pectxt: PEReader) (ctxt: ILMetadataReader) (idx, nm, _int
| _ -> sehMap.[key] <- [clause])
clauses
([], sehMap) ||> Seq.fold (fun acc (KeyValue(key, bs)) -> [ for b in bs -> {Range=key; Clause=b}: ILExceptionSpec ] @ acc)
- seh := sehClauses
- moreSections := (sectionFlag &&& e_CorILMethod_Sect_MoreSects) <> 0x0uy
- nextSectionBase := sectionBase + sectionSize
+ seh <- sehClauses
+ moreSections <- (sectionFlag &&& e_CorILMethod_Sect_MoreSects) <> 0x0uy
+ nextSectionBase <- sectionBase + sectionSize
done (* while *)
(* Convert the linear code format to the nested code format *)
if logging then dprintn ("doing localPdbInfos2")
let localPdbInfos2 = List.map (fun f -> f raw2nextLab) localPdbInfos
if logging then dprintn ("done localPdbInfos2, checking code...")
- let code = buildILCode nm lab2pc instrs !seh localPdbInfos2
+ let code = buildILCode nm lab2pc instrs seh localPdbInfos2
if logging then dprintn ("done checking code.")
MethodBody.IL
{ IsZeroInit=initlocals
@@ -3141,19 +3141,19 @@ let openMetadataReader (fileName, mdfile: BinaryFile, metadataPhysLoc, peinfo, p
else
let offset = seekReadInt32 mdv (pos + 0)
let length = seekReadInt32 mdv (pos + 4)
- let res = ref true
- let fin = ref false
- let n = ref 0
+ let mutable res = true
+ let mutable fin = false
+ let mutable n = 0
// read and compare the stream name byte by byte
- while (not !fin) do
- let c= seekReadByteAsInt32 mdv (pos + 8 + (!n))
+ while (not fin) do
+ let c= seekReadByteAsInt32 mdv (pos + 8 + n)
if c = 0 then
- fin := true
- elif !n >= Array.length name || c <> name.[!n] then
- res := false
- incr n
- if !res then Some(offset + metadataPhysLoc, length)
- else look (i+1) (align 0x04 (pos + 8 + (!n)))
+          fin <- true
+ elif n >= Array.length name || c <> name.[n] then
+ res <- false
+ n <- n + 1
+ if res then Some(offset + metadataPhysLoc, length)
+ else look (i+1) (align 0x04 (pos + 8 + n))
look 0 streamHeadersStart
let findStream name =
@@ -3248,15 +3248,15 @@ let openMetadataReader (fileName, mdfile: BinaryFile, metadataPhysLoc, peinfo, p
let valid = seekReadInt64 mdv (tablesStreamPhysLoc + 8)
let sorted = seekReadInt64 mdv (tablesStreamPhysLoc + 16)
let tablesPresent, tableRowCount, startOfTables =
- let present = ref []
+ let mutable present = []
let numRows = Array.create 64 0
- let prevNumRowIdx = ref (tablesStreamPhysLoc + 24)
+ let mutable prevNumRowIdx = tablesStreamPhysLoc + 24
for i = 0 to 63 do
if (valid &&& (int64 1 <<< i)) <> int64 0 then
- present := i :: !present
- numRows.[i] <- (seekReadInt32 mdv !prevNumRowIdx)
- prevNumRowIdx := !prevNumRowIdx + 4
- List.rev !present, numRows, !prevNumRowIdx
+ present <- i :: present
+ numRows.[i] <- (seekReadInt32 mdv prevNumRowIdx)
+ prevNumRowIdx <- prevNumRowIdx + 4
+ List.rev present, numRows, prevNumRowIdx
let getNumRows (tab: TableName) = tableRowCount.[tab.Index]
let numTables = tablesPresent.Length
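One wrinkle in the seekReadParams/seekReadParamExtras rewrite above: the return-value record used to be threaded through as a ref cell, so once it becomes a mutable local it is passed to the helper as a `byref<_>` parameter, and the caller takes the address of its mutable binding with `&`. A minimal sketch of that calling convention (hypothetical names, not the reader's types):

```fsharp
// A helper that updates its first argument in place through a byref parameter.
let bumpToFloor (value: byref<int>, floor: int) =
    if value < floor then value <- floor

let demo () =
    let mutable width = -3
    bumpToFloor (&width, 0)   // pass the address of the mutable local with &
    width                     // evaluates to 0 here
```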
diff --git a/src/fsharp/CompileOps.fs b/src/fsharp/CompileOps.fs
index 45d93c5cec3..067d3353a64 100644
--- a/src/fsharp/CompileOps.fs
+++ b/src/fsharp/CompileOps.fs
@@ -2479,7 +2479,7 @@ type AssemblyResolution =
resolvedPath: string
prepareToolTip: unit -> string
sysdir: bool
- ilAssemblyRef: ILAssemblyRef option ref
+ mutable ilAssemblyRef: ILAssemblyRef option
}
override this.ToString() = sprintf "%s%s" (if this.sysdir then "[sys]" else "") this.resolvedPath
@@ -2494,7 +2494,7 @@ type AssemblyResolution =
//
member this.GetILAssemblyRef(ctok, reduceMemoryUsage, tryGetMetadataSnapshot) =
cancellable {
- match !this.ilAssemblyRef with
+ match this.ilAssemblyRef with
| Some assemblyRef -> return assemblyRef
| None ->
let! assemblyRefOpt =
@@ -2522,7 +2522,7 @@ type AssemblyResolution =
tryGetMetadataSnapshot = tryGetMetadataSnapshot }
use reader = OpenILModuleReader this.resolvedPath readerSettings
mkRefToILAssembly reader.ILModuleDef.ManifestOfAssembly
- this.ilAssemblyRef := Some assemblyRef
+ this.ilAssemblyRef <- Some assemblyRef
return assemblyRef
}
@@ -2892,7 +2892,7 @@ type TcConfig private (data: TcConfigBuilder, validate: bool) =
resolvedPath = resolved
prepareToolTip = (fun () -> resolved)
sysdir = sysdir
- ilAssemblyRef = ref None }
+ ilAssemblyRef = None }
| None ->
if String.Compare(ext, ".dll", StringComparison.OrdinalIgnoreCase)=0
@@ -2927,7 +2927,7 @@ type TcConfig private (data: TcConfigBuilder, validate: bool) =
let line(append: string) = append.Trim([|' '|])+"\n"
line resolved + line fusionName)
sysdir = sysdir
- ilAssemblyRef = ref None }
+ ilAssemblyRef = None }
| None -> None
else None
@@ -3057,7 +3057,7 @@ type TcConfig private (data: TcConfigBuilder, validate: bool) =
resolvedPath=canonicalItemSpec
prepareToolTip = (fun () -> resolvedFile.prepareToolTip (originalReference.Text, canonicalItemSpec))
sysdir= tcConfig.IsSystemAssembly canonicalItemSpec
- ilAssemblyRef = ref None })
+ ilAssemblyRef = None })
(maxIndexOfReference, assemblyResolutions))
// When calculating the resulting resolutions, we're going to use the index of the reference
@@ -3395,7 +3395,7 @@ let ParseOneInputLexbuf (tcConfig: TcConfig, lexResourceManager, conditionalComp
try
let skip = true in (* don't report whitespace from lexer *)
let lightSyntaxStatus = LightSyntaxStatus (tcConfig.ComputeLightSyntaxInitialStatus filename, true)
- let lexargs = mkLexargs (filename, conditionalCompilationDefines@tcConfig.conditionalCompilationDefines, lightSyntaxStatus, lexResourceManager, ref [], errorLogger, tcConfig.pathMap)
+ let lexargs = mkLexargs (filename, conditionalCompilationDefines@tcConfig.conditionalCompilationDefines, lightSyntaxStatus, lexResourceManager, [], errorLogger, tcConfig.pathMap)
let shortFilename = SanitizeFileName filename tcConfig.implicitIncludeDir
let input =
Lexhelp.usingLexbufForParsing (lexbuf, filename) (fun lexbuf ->
@@ -3525,24 +3525,24 @@ type TcAssemblyResolutions(tcConfig: TcConfig, results: AssemblyResolution list,
let frameworkDLLs, nonFrameworkReferences = resolutions.GetAssemblyResolutions() |> List.partition (fun r -> r.sysdir)
let unresolved = resolutions.GetUnresolvedReferences()
#if DEBUG
- let itFailed = ref false
+ let mutable itFailed = false
let addedText = "\nIf you want to debug this right now, attach a debugger, and put a breakpoint in 'CompileOps.fs' near the text '!itFailed', and you can re-step through the assembly resolution logic."
unresolved
|> List.iter (fun (UnresolvedAssemblyReference(referenceText, _ranges)) ->
if referenceText.Contains("mscorlib") then
System.Diagnostics.Debug.Assert(false, sprintf "whoops, did not resolve mscorlib: '%s'%s" referenceText addedText)
- itFailed := true)
+ itFailed <- true)
frameworkDLLs
|> List.iter (fun x ->
if not(FileSystem.IsPathRootedShim(x.resolvedPath)) then
System.Diagnostics.Debug.Assert(false, sprintf "frameworkDLL should be absolute path: '%s'%s" x.resolvedPath addedText)
- itFailed := true)
+ itFailed <- true)
nonFrameworkReferences
|> List.iter (fun x ->
if not(FileSystem.IsPathRootedShim(x.resolvedPath)) then
System.Diagnostics.Debug.Assert(false, sprintf "nonFrameworkReference should be absolute path: '%s'%s" x.resolvedPath addedText)
- itFailed := true)
- if !itFailed then
+ itFailed <- true)
+ if itFailed then
// idea is, put a breakpoint here and then step through
let assemblyList = TcAssemblyResolutions.GetAllDllReferences tcConfig
let resolutions = TcAssemblyResolutions.ResolveAssemblyReferences (ctok, tcConfig, assemblyList, [])
@@ -4207,9 +4207,9 @@ and [] TcImports(tcConfigP: TcConfigProvider, initialResolutions: TcAsse
let systemRuntimeContainsType =
// NOTE: do not touch this, edit: but we did, we had no choice - TPs cannot hold a strong reference on TcImports "ever".
let tcImports = tcImportsWeak
- let systemRuntimeContainsTypeRef = ref (fun typeName -> tcImports.SystemRuntimeContainsType typeName)
- tcImportsStrong.AttachDisposeTypeProviderAction(fun () -> systemRuntimeContainsTypeRef := (fun _ -> raise (System.ObjectDisposedException("The type provider has been disposed"))))
- fun arg -> systemRuntimeContainsTypeRef.Value arg
+ let mutable systemRuntimeContainsTypeRef = fun typeName -> tcImports.SystemRuntimeContainsType typeName
+ tcImportsStrong.AttachDisposeTypeProviderAction(fun () -> systemRuntimeContainsTypeRef <- fun _ -> raise (System.ObjectDisposedException("The type provider has been disposed")))
+ fun arg -> systemRuntimeContainsTypeRef arg
let providers =
[ for designTimeAssemblyName in designTimeAssemblyNames do
@@ -4676,7 +4676,7 @@ and [] TcImports(tcConfigP: TcConfigProvider, initialResolutions: TcAsse
error(InternalError("BuildFrameworkTcImports: no successful import of "+coreLibraryResolution.resolvedPath, coreLibraryResolution.originalReference.Range))
| None ->
error(InternalError(sprintf "BuildFrameworkTcImports: no resolution of '%s'" coreLibraryReference.Text, rangeStartup))
- IlxSettings.ilxFsharpCoreLibAssemRef :=
+ IlxSettings.ilxFsharpCoreLibAssemRef <-
(let scoref = fslibCcuInfo.ILScopeRef
match scoref with
| ILScopeRef.Assembly aref -> Some aref
@@ -4691,11 +4691,11 @@ and [] TcImports(tcConfigP: TcConfigProvider, initialResolutions: TcAsse
#if DEBUG
// the global_g reference cell is used only for debug printing
- global_g := Some tcGlobals
+ global_g <- Some tcGlobals
#endif
// do this prior to parsing, since parsing IL assembly code may refer to mscorlib
#if !NO_INLINE_IL_PARSER
- FSharp.Compiler.AbstractIL.Internal.AsciiConstants.parseILGlobals := tcGlobals.ilg
+ FSharp.Compiler.AbstractIL.Internal.AsciiConstants.parseILGlobals <- tcGlobals.ilg
#endif
frameworkTcImports.SetTcGlobals tcGlobals
return tcGlobals, frameworkTcImports
@@ -5035,8 +5035,8 @@ module private ScriptPreprocessClosure =
(tcConfig: TcConfig, inp: ParsedInput, pathOfMetaCommandSource) =
let tcConfigB = tcConfig.CloneOfOriginalBuilder
- let nowarns = ref []
- let getWarningNumber = fun () (m, s) -> nowarns := (s, m) :: !nowarns
+ let mutable nowarns = []
+ let getWarningNumber = fun () (m, s) -> nowarns <- (s, m) :: nowarns
let addReferencedAssemblyByPath = fun () (m, s) -> tcConfigB.AddReferencedAssemblyByPath(m, s)
let addLoadedSource = fun () (m, s) -> tcConfigB.AddLoadedSource(m, s, pathOfMetaCommandSource)
try
@@ -5056,7 +5056,7 @@ module private ScriptPreprocessClosure =
(closureSources, tcConfig: TcConfig, codeContext,
lexResourceManager: Lexhelp.LexResourceManager) =
- let tcConfig = ref tcConfig
+ let mutable tcConfig = tcConfig
let observedSources = Observed()
let rec loop (ClosureSource(filename, m, sourceText, parseRequired)) =
@@ -5067,7 +5067,7 @@ module private ScriptPreprocessClosure =
let parseResult, parseDiagnostics =
let errorLogger = CapturingErrorLogger("FindClosureParse")
use _unwindEL = PushErrorLoggerPhaseUntilUnwind (fun _ -> errorLogger)
- let result = ParseScriptText (filename, sourceText, !tcConfig, codeContext, lexResourceManager, errorLogger)
+ let result = ParseScriptText (filename, sourceText, tcConfig, codeContext, lexResourceManager, errorLogger)
result, errorLogger.Diagnostics
match parseResult with
@@ -5075,12 +5075,12 @@ module private ScriptPreprocessClosure =
let errorLogger = CapturingErrorLogger("FindClosureMetaCommands")
use _unwindEL = PushErrorLoggerPhaseUntilUnwind (fun _ -> errorLogger)
let pathOfMetaCommandSource = Path.GetDirectoryName filename
- let preSources = (!tcConfig).GetAvailableLoadedSources()
+ let preSources = tcConfig.GetAvailableLoadedSources()
- let tcConfigResult, noWarns = ApplyMetaCommandsFromInputToTcConfigAndGatherNoWarn (!tcConfig, parsedScriptAst, pathOfMetaCommandSource)
- tcConfig := tcConfigResult // We accumulate the tcConfig in order to collect assembly references
+ let tcConfigResult, noWarns = ApplyMetaCommandsFromInputToTcConfigAndGatherNoWarn (tcConfig, parsedScriptAst, pathOfMetaCommandSource)
+ tcConfig <- tcConfigResult // We accumulate the tcConfig in order to collect assembly references
- let postSources = (!tcConfig).GetAvailableLoadedSources()
+ let postSources = tcConfig.GetAvailableLoadedSources()
let sources = if preSources.Length < postSources.Length then postSources.[preSources.Length..] else []
//for (_, subFile) in sources do
@@ -5094,7 +5094,7 @@ module private ScriptPreprocessClosure =
yield ClosureFile(subFile, m, None, [], [], [])
//printfn "yielding source %s" filename
- yield ClosureFile(filename, m, Some parsedScriptAst, parseDiagnostics, errorLogger.Diagnostics, !noWarns)
+ yield ClosureFile(filename, m, Some parsedScriptAst, parseDiagnostics, errorLogger.Diagnostics, noWarns)
| None ->
//printfn "yielding source %s (failed parse)" filename
@@ -5104,7 +5104,7 @@ module private ScriptPreprocessClosure =
//printfn "yielding non-script source %s" filename
yield ClosureFile(filename, m, None, [], [], []) ]
- closureSources |> List.collect loop, !tcConfig
+ closureSources |> List.collect loop, tcConfig
/// Reduce the full directive closure into LoadClosure
let GetLoadClosure(ctok, rootFilename, closureFiles, tcConfig: TcConfig, codeContext) =
diff --git a/src/fsharp/CompileOps.fsi b/src/fsharp/CompileOps.fsi
index 230bd508de5..a0b031092ac 100644
--- a/src/fsharp/CompileOps.fsi
+++ b/src/fsharp/CompileOps.fsi
@@ -204,7 +204,7 @@ type AssemblyResolution =
/// Whether or not this is an installed system assembly (for example, System.dll)
sysdir: bool
// Lazily populated ilAssemblyRef for this reference.
- ilAssemblyRef: ILAssemblyRef option ref }
+ mutable ilAssemblyRef: ILAssemblyRef option }
type UnresolvedAssemblyReference = UnresolvedAssemblyReference of string * AssemblyReference list
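// A minimal, hypothetical sketch of the record-field change above: a field typed
// "ILAssemblyRef option ref" becomes "mutable ilAssemblyRef: ILAssemblyRef option", so the
// cached value is read directly and updated with '<-'. The 'CachedLookup' record and the
// 'compute' parameter below are made up for illustration.

type CachedLookup =
    { key: string
      mutable cached: int option }

let getOrCompute (compute: string -> int) (lookup: CachedLookup) =
    match lookup.cached with
    | Some v -> v
    | None ->
        let v = compute lookup.key
        lookup.cached <- Some v       // was: lookup.cached := Some v
        v

// Construction no longer wraps the initial value in a ref cell:
let emptyLookup = { key = "example"; cached = None }   // was: cached = ref None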
diff --git a/src/fsharp/CompileOptions.fs b/src/fsharp/CompileOptions.fs
index 29e854a83f1..441df5f3468 100644
--- a/src/fsharp/CompileOptions.fs
+++ b/src/fsharp/CompileOptions.fs
@@ -1015,7 +1015,7 @@ let testFlag tcConfigB =
match s with
| "StackSpan" -> tcConfigB.internalTestSpanStackReferring <- true
| "ErrorRanges" -> tcConfigB.errorStyle <- ErrorStyle.TestErrors
- | "Tracking" -> Lib.tracking := true (* general purpose on/off diagnostics flag *)
+ | "Tracking" -> Lib.tracking <- true (* general purpose on/off diagnostics flag *)
| "NoNeedToTailcall" -> tcConfigB.optSettings <- { tcConfigB.optSettings with reportNoNeedToTailcall = true }
| "FunctionSizes" -> tcConfigB.optSettings <- { tcConfigB.optSettings with reportFunctionSizes = true }
| "TotalSizes" -> tcConfigB.optSettings <- { tcConfigB.optSettings with reportTotalSizes = true }
@@ -1242,7 +1242,7 @@ let compilingFsLibFlag (tcConfigB: TcConfigBuilder) =
tcConfigB.compilingFslib <- true
tcConfigB.TurnWarningOff(rangeStartup, "42")
ErrorLogger.reportLibraryOnlyFeatures <- false
- IlxSettings.ilxCompilingFSharpCoreLib := true),
+ IlxSettings.ilxCompilingFSharpCoreLib <- true),
Some(InternalCommandLineOption("--compiling-fslib", rangeCmdArgs)), None)
let compilingFsLib20Flag =
@@ -1312,7 +1312,7 @@ let deprecatedFlagsFsc tcConfigB =
CompilerOption
("progress", tagNone,
- OptionUnit (fun () -> progress := true),
+ OptionUnit (fun () -> progress <- true),
Some(DeprecatedCommandLineOptionNoDescription("--progress", rangeCmdArgs)), None)
compilingFsLibFlag tcConfigB
@@ -1588,14 +1588,13 @@ let ApplyCommandLineArgs(tcConfigB: TcConfigBuilder, sourceFiles: string list, c
// PrintWholeAssemblyImplementation
//----------------------------------------------------------------------------
-let showTermFileCount = ref 0
+let mutable showTermFileCount = 0
let PrintWholeAssemblyImplementation g (tcConfig:TcConfig) outfile header expr =
if tcConfig.showTerms then
if tcConfig.writeTermsToFiles then
let filename = outfile + ".terms"
- let n = !showTermFileCount
- showTermFileCount := n+1
- use f = System.IO.File.CreateText (filename + "-" + string n + "-" + header)
+ use f = System.IO.File.CreateText (filename + "-" + string showTermFileCount + "-" + header)
+ showTermFileCount <- showTermFileCount + 1
Layout.outL f (Layout.squashTo 192 (DebugPrint.implFilesL g expr))
else
dprintf "\n------------------\nshowTerm: %s:\n" header
@@ -1606,11 +1605,11 @@ let PrintWholeAssemblyImplementation g (tcConfig:TcConfig) outfile header expr =
// ReportTime
//----------------------------------------------------------------------------
-let tPrev = ref None
-let nPrev = ref None
+let mutable tPrev = None
+let mutable nPrev = None
let ReportTime (tcConfig:TcConfig) descr =
- match !nPrev with
+ match nPrev with
| None -> ()
| Some prevDescr ->
if tcConfig.pause then
@@ -1651,7 +1650,7 @@ let ReportTime (tcConfig:TcConfig) descr =
let ptime = System.Diagnostics.Process.GetCurrentProcess()
let wsNow = ptime.WorkingSet64/1000000L
- match !tPrev, !nPrev with
+ match tPrev, nPrev with
| Some (timePrev, gcPrev:int []), Some prevDescr ->
let spanGC = [| for i in 0 .. maxGen -> System.GC.CollectionCount i - gcPrev.[i] |]
dprintf "TIME: %4.1f Delta: %4.1f Mem: %3d"
@@ -1662,9 +1661,9 @@ let ReportTime (tcConfig:TcConfig) descr =
prevDescr
| _ -> ()
- tPrev := Some (timeNow, gcNow)
+ tPrev <- Some (timeNow, gcNow)
- nPrev := Some descr
+ nPrev <- Some descr
//----------------------------------------------------------------------------
// OPTIMIZATION - support - addDllToOptEnv
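// A minimal, hypothetical sketch of the module-level change above: top-level ref cells such
// as 'let tPrev = ref None' become 'let mutable tPrev = None', with call sites switching from
// '!' and ':=' to direct reads and '<-' assignments. The 'Timing' module below is
// illustrative only, not part of the patch.

module Timing =
    let mutable lastSample : System.DateTime option = None

    let report descr =
        let now = System.DateTime.UtcNow
        match lastSample with
        | Some prev -> printfn "%s: %.1f ms since previous sample" descr (now - prev).TotalMilliseconds
        | None -> printfn "%s: first sample" descr
        lastSample <- Some now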
diff --git a/src/fsharp/ConstraintSolver.fs b/src/fsharp/ConstraintSolver.fs
index f5aade84008..e22437de141 100644
--- a/src/fsharp/ConstraintSolver.fs
+++ b/src/fsharp/ConstraintSolver.fs
@@ -1710,9 +1710,9 @@ and AddConstraint (csenv: ConstraintSolverEnv) ndeep m2 trace tp newConstraint
// This works because the types on the r.h.s. of subtype
// constraints are head-types and so any further inferences are equational.
let collect ty =
- let res = ref []
- IterateEntireHierarchyOfType (fun x -> res := x :: !res) g amap m AllowMultiIntfInstantiations.No ty
- List.rev !res
+ let mutable res = []
+ IterateEntireHierarchyOfType (fun x -> res <- x :: res) g amap m AllowMultiIntfInstantiations.No ty
+ List.rev res
let parents1 = collect ty1
let parents2 = collect ty2
trackErrors {
diff --git a/src/fsharp/ErrorLogger.fs b/src/fsharp/ErrorLogger.fs
index c37a3395121..2f515d566a8 100755
--- a/src/fsharp/ErrorLogger.fs
+++ b/src/fsharp/ErrorLogger.fs
@@ -441,8 +441,8 @@ let PushThreadBuildPhaseUntilUnwind (phase:BuildPhase) =
let PushErrorLoggerPhaseUntilUnwind(errorLoggerTransformer : ErrorLogger -> #ErrorLogger) =
let oldErrorLogger = CompileThreadStatic.ErrorLogger
let newErrorLogger = errorLoggerTransformer oldErrorLogger
- let newInstalled = ref true
- let newIsInstalled() = if !newInstalled then () else (assert false; (); (*failwith "error logger used after unwind"*)) // REVIEW: ok to throw?
+ let mutable newInstalled = true
+ let newIsInstalled() = if newInstalled then () else (assert false; (); (*failwith "error logger used after unwind"*)) // REVIEW: ok to throw?
let chkErrorLogger = { new ErrorLogger("PushErrorLoggerPhaseUntilUnwind") with
member __.DiagnosticSink(phasedError, isError) = newIsInstalled(); newErrorLogger.DiagnosticSink(phasedError, isError)
member __.ErrorCount = newIsInstalled(); newErrorLogger.ErrorCount }
@@ -452,7 +452,7 @@ let PushErrorLoggerPhaseUntilUnwind(errorLoggerTransformer : ErrorLogger -> #Err
{ new System.IDisposable with
member __.Dispose() =
CompileThreadStatic.ErrorLogger <- oldErrorLogger
- newInstalled := false }
+ newInstalled <- false }
let SetThreadBuildPhaseNoUnwind(phase:BuildPhase) = CompileThreadStatic.BuildPhase <- phase
let SetThreadErrorLoggerNoUnwind errorLogger = CompileThreadStatic.ErrorLogger <- errorLogger
diff --git a/src/fsharp/FSharp.Build/FSharp.Build.fsproj b/src/fsharp/FSharp.Build/FSharp.Build.fsproj
index 76b9c7043e9..f427453df99 100644
--- a/src/fsharp/FSharp.Build/FSharp.Build.fsproj
+++ b/src/fsharp/FSharp.Build/FSharp.Build.fsproj
@@ -26,6 +26,7 @@
+
diff --git a/src/fsharp/FSharp.Build/FSharpEmbedResourceText.fs b/src/fsharp/FSharp.Build/FSharpEmbedResourceText.fs
index 63c9bf12457..3428d60ad8f 100644
--- a/src/fsharp/FSharp.Build/FSharpEmbedResourceText.fs
+++ b/src/fsharp/FSharp.Build/FSharpEmbedResourceText.fs
@@ -394,18 +394,18 @@ open Printf
stringInfos |> Seq.iter (fun (lineNum, (optErrNum,ident), str, holes, netFormatString) ->
let formalArgs = new System.Text.StringBuilder()
let actualArgs = new System.Text.StringBuilder()
- let firstTime = ref true
- let n = ref 0
+ let mutable firstTime = true
+ let mutable n = 0
formalArgs.Append "(" |> ignore
for hole in holes do
- if !firstTime then
- firstTime := false
+ if firstTime then
+ firstTime <- false
else
formalArgs.Append ", " |> ignore
actualArgs.Append " " |> ignore
- formalArgs.Append(sprintf "a%d : %s" !n hole) |> ignore
- actualArgs.Append(sprintf "a%d" !n) |> ignore
- n := !n + 1
+ formalArgs.Append(sprintf "a%d : %s" n hole) |> ignore
+ actualArgs.Append(sprintf "a%d" n) |> ignore
+ n <- n + 1
formalArgs.Append ")" |> ignore
fprintfn out " /// %s" str
fprintfn out " /// (Originally from %s:%d)" filename (lineNum+1)
diff --git a/src/fsharp/FSharp.Build/Microsoft.FSharp.Targets b/src/fsharp/FSharp.Build/Microsoft.FSharp.Targets
index 8421c8ea504..0b51d7204f6 100644
--- a/src/fsharp/FSharp.Build/Microsoft.FSharp.Targets
+++ b/src/fsharp/FSharp.Build/Microsoft.FSharp.Targets
@@ -29,6 +29,7 @@ this file.
+
true
diff --git a/src/fsharp/FSharp.Build/SubstituteText.fs b/src/fsharp/FSharp.Build/SubstituteText.fs
new file mode 100644
index 00000000000..16b8eab5245
--- /dev/null
+++ b/src/fsharp/FSharp.Build/SubstituteText.fs
@@ -0,0 +1,92 @@
+// Copyright (c) Microsoft Corporation. All Rights Reserved. See License.txt in the project root for license information.
+
+namespace FSharp.Build
+
+open System
+open System.Collections
+open System.IO
+open Microsoft.Build.Framework
+open Microsoft.Build.Utilities
+
+type SubstituteText () =
+
+ let mutable _buildEngine : IBuildEngine = null
+ let mutable _hostObject : ITaskHost = null
+
+ let mutable copiedFiles = new ResizeArray()
+ let mutable embeddedResources : ITaskItem[] = [||]
+
+ []
+ member this.EmbeddedResources
+ with get() = embeddedResources
+ and set(value) = embeddedResources <- value
+
+ [