diff --git a/FSharpBuild.Directory.Build.targets b/FSharpBuild.Directory.Build.targets index 7548cef7acf..3132ba1c970 100644 --- a/FSharpBuild.Directory.Build.targets +++ b/FSharpBuild.Directory.Build.targets @@ -1,4 +1,5 @@ + @@ -8,7 +9,7 @@ + BeforeTargets="AssignTargetPaths;BeforeBuild;GenerateFSharpTextResources"> <__TargetFilePath>@(NoneSubstituteText->'$(IntermediateOutputPath)%(Filename)%(Extension)') @@ -20,16 +21,19 @@ <_CopyToOutputDirectory Condition="'%(NoneSubstituteText.CopyToOutputDirectory)' != ''">%(NoneSubstituteText.CopyToOutputDirectory) <_CopyToOutputDirectory Condition="'%(NoneSubstituteText.CopyToOutputDirectory)' == ''">Never + + <_IncludeInVsix>false + <_IncludeInVsix Condition="'%(NoneSubstituteText.IncludeInVsix)' == 'true'">true - - + + @@ -61,4 +65,27 @@ + + + + + + + + + + + + + + + + + + + diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index ce181f02b84..4f40053c4cc 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -3,9 +3,9 @@ - + https://github.com/dotnet/arcade - d2d025c1de37b1258f147851cb3e7373ad5ff09d + d4a1ce6278134f5dc25843e228d0498203031e61 diff --git a/eng/common/CheckSymbols.ps1 b/eng/common/CheckSymbols.ps1 index 5442eff3861..b8d84607b89 100644 --- a/eng/common/CheckSymbols.ps1 +++ b/eng/common/CheckSymbols.ps1 @@ -5,12 +5,11 @@ param( ) Add-Type -AssemblyName System.IO.Compression.FileSystem -. $PSScriptRoot\pipeline-logging-functions.ps1 function FirstMatchingSymbolDescriptionOrDefault { param( [string] $FullPath, # Full path to the module that has to be checked - [string] $TargetServerParameter, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols + [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols [string] $SymbolsPath ) @@ -22,36 +21,36 @@ function FirstMatchingSymbolDescriptionOrDefault { # checking and which type of file was uploaded. 
  # The file itself is returned
-  $SymbolPath = $SymbolsPath + '\' + $FileName
+  $SymbolPath = $SymbolsPath + "\" + $FileName
  # PDB file for the module
-  $PdbPath = $SymbolPath.Replace($Extension, '.pdb')
+  $PdbPath = $SymbolPath.Replace($Extension, ".pdb")
  # PDB file for R2R module (created by crossgen)
-  $NGenPdb = $SymbolPath.Replace($Extension, '.ni.pdb')
+  $NGenPdb = $SymbolPath.Replace($Extension, ".ni.pdb")
  # DBG file for a .so library
-  $SODbg = $SymbolPath.Replace($Extension, '.so.dbg')
+  $SODbg = $SymbolPath.Replace($Extension, ".so.dbg")
  # DWARF file for a .dylib
-  $DylibDwarf = $SymbolPath.Replace($Extension, '.dylib.dwarf')
+  $DylibDwarf = $SymbolPath.Replace($Extension, ".dylib.dwarf")
-  .\dotnet-symbol.exe --symbols --modules --windows-pdbs $TargetServerParameter $FullPath -o $SymbolsPath | Out-Null
+  .\dotnet-symbol.exe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath | Out-Null
  if (Test-Path $PdbPath) {
-    return 'PDB'
+    return "PDB"
  }
  elseif (Test-Path $NGenPdb) {
-    return 'NGen PDB'
+    return "NGen PDB"
  }
  elseif (Test-Path $SODbg) {
-    return 'DBG for SO'
+    return "DBG for SO"
  }
  elseif (Test-Path $DylibDwarf) {
-    return 'Dwarf for Dylib'
+    return "Dwarf for Dylib"
  }
  elseif (Test-Path $SymbolPath) {
-    return 'Module'
+    return "Module"
  }
  else {
    return $null
@@ -69,7 +68,7 @@ function CountMissingSymbols {
  }
  # Extensions for which we'll look for symbols
-  $RelevantExtensions = @('.dll', '.exe', '.so', '.dylib')
+  $RelevantExtensions = @(".dll", ".exe", ".so", ".dylib")
  # How many files are missing symbol information
  $MissingSymbols = 0
@@ -77,7 +76,7 @@ function CountMissingSymbols {
  $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
  $PackageGuid = New-Guid
  $ExtractPath = Join-Path -Path $ExtractPath -ChildPath $PackageGuid
-  $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath 'Symbols'
+  $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath "Symbols"
  [System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath)
@@ -87,31 +86,31 @@ function CountMissingSymbols {
  Get-ChildItem -Recurse $ExtractPath |
    Where-Object {$RelevantExtensions -contains $_.Extension} |
    ForEach-Object {
-      if ($_.FullName -Match '\\ref\\') {
+      if ($_.FullName -Match "\\ref\\") {
        Write-Host "`t Ignoring reference assembly file" $_.FullName
        return
      }
-      $SymbolsOnMSDL = FirstMatchingSymbolDescriptionOrDefault -FullPath $_.FullName -TargetServerParameter '--microsoft-symbol-server' -SymbolsPath $SymbolsPath
-      $SymbolsOnSymWeb = FirstMatchingSymbolDescriptionOrDefault -FullPath $_.FullName -TargetServerParameter '--internal-server' -SymbolsPath $SymbolsPath
+      $SymbolsOnMSDL = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--microsoft-symbol-server" $SymbolsPath
+      $SymbolsOnSymWeb = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--internal-server" $SymbolsPath
      Write-Host -NoNewLine "`t Checking file" $_.FullName "... "
      if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) {
-        Write-Host "Symbols found on MSDL (${$SymbolsOnMSDL}) and SymWeb (${$SymbolsOnSymWeb})"
+        Write-Host "Symbols found on MSDL (" $SymbolsOnMSDL ") and SymWeb (" $SymbolsOnSymWeb ")"
      }
      else {
        $MissingSymbols++
        if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) {
-          Write-Host 'No symbols found on MSDL or SymWeb!'
+          Write-Host "No symbols found on MSDL or SymWeb!"
        }
        else {
          if ($SymbolsOnMSDL -eq $null) {
-            Write-Host 'No symbols found on MSDL!'
+            Write-Host "No symbols found on MSDL!"
          }
          else {
-            Write-Host 'No symbols found on SymWeb!'
+ Write-Host "No symbols found on SymWeb!" } } } @@ -130,26 +129,26 @@ function CheckSymbolsAvailable { Get-ChildItem "$InputPath\*.nupkg" | ForEach-Object { $FileName = $_.Name - + # These packages from Arcade-Services include some native libraries that # our current symbol uploader can't handle. Below is a workaround until # we get issue: https://github.com/dotnet/arcade/issues/2457 sorted. - if ($FileName -Match 'Microsoft\.DotNet\.Darc\.') { + if ($FileName -Match "Microsoft\.DotNet\.Darc\.") { Write-Host "Ignoring Arcade-services file: $FileName" Write-Host return } - elseif ($FileName -Match 'Microsoft\.DotNet\.Maestro\.Tasks\.') { + elseif ($FileName -Match "Microsoft\.DotNet\.Maestro\.Tasks\.") { Write-Host "Ignoring Arcade-services file: $FileName" Write-Host return } - + Write-Host "Validating $FileName " $Status = CountMissingSymbols "$InputPath\$FileName" if ($Status -ne 0) { - Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Missing symbols for $Status modules in the package $FileName" + Write-Error "Missing symbols for $Status modules in the package $FileName" } Write-Host diff --git a/eng/common/PublishToSymbolServers.proj b/eng/common/PublishToSymbolServers.proj index 311e2bbe0f6..5d55e312b01 100644 --- a/eng/common/PublishToSymbolServers.proj +++ b/eng/common/PublishToSymbolServers.proj @@ -37,8 +37,6 @@ 3650 true - true - true false @@ -58,7 +56,7 @@ DryRun="false" ConvertPortablePdbsToWindowsPdbs="false" PdbConversionTreatAsWarning="" - Condition="$(PublishToSymbolServer) and $(PublishToMSDL)"/> + Condition="$(PublishToSymbolServer)"/> git commit --> git push - Write-Host 'git add .' - git add . - if ($LASTEXITCODE -ne 0) { - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git add failed with exit code $LASTEXITCODE." - ExitWithExitCode $LASTEXITCODE - } - Write-Host "git -c user.email=`"dn-bot@microsoft.com`" -c user.name=`"Dotnet Bot`" commit -m `"$PushReason for $Repository/$BranchName`"" - git -c user.email="dn-bot@microsoft.com" -c user.name="Dotnet Bot" commit -m "$PushReason for $Repository/$BranchName" - if ($LASTEXITCODE -ne 0) { - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git commit failed with exit code $LASTEXITCODE." - ExitWithExitCode $LASTEXITCODE - } - Write-Host 'git push' - git push - if ($LASTEXITCODE -ne 0) { - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git push failed with exit code $LASTEXITCODE." - ExitWithExitCode $LASTEXITCODE - } +# We create the temp directory where we'll store the sdl-config repository +$sdlDir = Join-Path $env:TEMP "sdl" +if (Test-Path $sdlDir) { + Remove-Item -Force -Recurse $sdlDir +} - # Return to the original directory - Pop-Location +Write-Host "git clone https://dnceng:`$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir" +git clone https://dnceng:$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir +if ($LASTEXITCODE -ne 0) { + Write-Error "Git clone failed with exit code $LASTEXITCODE." 
+}
+# We copy the .gdn folder from our local run into the git repository so it can be committed
+$sdlRepositoryFolder = Join-Path (Join-Path (Join-Path $sdlDir $Repository) $BranchName) ".gdn"
+if (Get-Command Robocopy) {
+  Robocopy /S $GdnFolder $sdlRepositoryFolder
+} else {
+  rsync -r $GdnFolder $sdlRepositoryFolder
 }
-catch {
-  Write-Host $_.ScriptStackTrace
-  Write-PipelineTelemetryError -Category 'Sdl' -Message $_
-  ExitWithExitCode 1
-}
\ No newline at end of file
+# cd to the sdl-config directory so we can run git there
+Push-Location $sdlDir
+# git add . --> git commit --> git push
+Write-Host "git add ."
+git add .
+if ($LASTEXITCODE -ne 0) {
+  Write-Error "Git add failed with exit code $LASTEXITCODE."
+}
+Write-Host "git -c user.email=`"dn-bot@microsoft.com`" -c user.name=`"Dotnet Bot`" commit -m `"$PushReason for $Repository/$BranchName`""
+git -c user.email="dn-bot@microsoft.com" -c user.name="Dotnet Bot" commit -m "$PushReason for $Repository/$BranchName"
+if ($LASTEXITCODE -ne 0) {
+  Write-Error "Git commit failed with exit code $LASTEXITCODE."
+}
+Write-Host "git push"
+git push
+if ($LASTEXITCODE -ne 0) {
+  Write-Error "Git push failed with exit code $LASTEXITCODE."
+}
+
+# Return to the original directory
+Pop-Location
\ No newline at end of file
diff --git a/eng/common/sdl/run-sdl.ps1 b/eng/common/sdl/run-sdl.ps1
index 40a084f7969..9bc25314ae2 100644
--- a/eng/common/sdl/run-sdl.ps1
+++ b/eng/common/sdl/run-sdl.ps1
@@ -5,65 +5,55 @@ Param(
  [string] $GdnFolder,
  [string[]] $ToolsList,
  [string] $UpdateBaseline,
-  [string] $GuardianLoggerLevel='Standard',
+  [string] $GuardianLoggerLevel="Standard",
  [string[]] $CrScanAdditionalRunConfigParams,
  [string[]] $PoliCheckAdditionalRunConfigParams
 )
-$ErrorActionPreference = 'Stop'
+$ErrorActionPreference = "Stop"
 Set-StrictMode -Version 2.0
-$disableConfigureToolsetImport = $true
 $LASTEXITCODE = 0
-try {
-  . $PSScriptRoot\..\tools.ps1
+# We store config files in the r directory of .gdn
+Write-Host $ToolsList
+$gdnConfigPath = Join-Path $GdnFolder "r"
+$ValidPath = Test-Path $GuardianCliLocation
-  # We store config files in the r directory of .gdn
-  Write-Host $ToolsList
-  $gdnConfigPath = Join-Path $GdnFolder 'r'
-  $ValidPath = Test-Path $GuardianCliLocation
-
-  if ($ValidPath -eq $False)
-  {
-    Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
-    ExitWithExitCode 1
-  }
-
-  $configParam = @('--config')
+if ($ValidPath -eq $False)
+{
+  Write-Host "Invalid Guardian CLI Location."
+ exit 1 +} - foreach ($tool in $ToolsList) { - $gdnConfigFile = Join-Path $gdnConfigPath "$tool-configure.gdnconfig" - Write-Host $tool - # We have to manually configure tools that run on source to look at the source directory only - if ($tool -eq 'credscan') { - Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" TargetDirectory < $TargetDirectory `" `" OutputType < pre `" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})" - & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " TargetDirectory < $TargetDirectory " "OutputType < pre" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams}) - if ($LASTEXITCODE -ne 0) { - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE." - ExitWithExitCode $LASTEXITCODE - } +$configParam = @("--config") + +foreach ($tool in $ToolsList) { + $gdnConfigFile = Join-Path $gdnConfigPath "$tool-configure.gdnconfig" + Write-Host $tool + # We have to manually configure tools that run on source to look at the source directory only + if ($tool -eq "credscan") { + Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" TargetDirectory < $TargetDirectory `" `" OutputType < pre `" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})" + & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " TargetDirectory < $TargetDirectory " "OutputType < pre" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams}) + if ($LASTEXITCODE -ne 0) { + Write-Host "Guardian configure for $tool failed with exit code $LASTEXITCODE." + exit $LASTEXITCODE } - if ($tool -eq 'policheck') { - Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" Target < $TargetDirectory `" $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})" - & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " Target < $TargetDirectory " $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams}) - if ($LASTEXITCODE -ne 0) { - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE." 
- ExitWithExitCode $LASTEXITCODE - } + } + if ($tool -eq "policheck") { + Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" Target < $TargetDirectory `" $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})" + & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " Target < $TargetDirectory " $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams}) + if ($LASTEXITCODE -ne 0) { + Write-Host "Guardian configure for $tool failed with exit code $LASTEXITCODE." + exit $LASTEXITCODE } - - $configParam+=$gdnConfigFile } - Write-Host "$GuardianCliLocation run --working-directory $WorkingDirectory --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam" - & $GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam - if ($LASTEXITCODE -ne 0) { - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian run for $ToolsList using $configParam failed with exit code $LASTEXITCODE." - ExitWithExitCode $LASTEXITCODE - } + $configParam+=$gdnConfigFile +} + +Write-Host "$GuardianCliLocation run --working-directory $WorkingDirectory --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam" +& $GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam +if ($LASTEXITCODE -ne 0) { + Write-Host "Guardian run for $ToolsList using $configParam failed with exit code $LASTEXITCODE." 
+ exit $LASTEXITCODE } -catch { - Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Category 'Sdl' -Message $_ - ExitWithExitCode 1 -} \ No newline at end of file diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml index 2973bcaf3a4..52e2ff021d7 100644 --- a/eng/common/templates/job/execute-sdl.yml +++ b/eng/common/templates/job/execute-sdl.yml @@ -40,13 +40,13 @@ jobs: itemPattern: "**" downloadPath: $(Build.ArtifactStagingDirectory)\artifacts - powershell: eng/common/sdl/extract-artifact-packages.ps1 - -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts - -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts + -InputPath $(Build.SourcesDirectory)\artifacts\BlobArtifacts + -ExtractPath $(Build.SourcesDirectory)\artifacts\BlobArtifacts displayName: Extract Blob Artifacts continueOnError: ${{ parameters.sdlContinueOnError }} - powershell: eng/common/sdl/extract-artifact-packages.ps1 - -InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts - -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts + -InputPath $(Build.SourcesDirectory)\artifacts\PackageArtifacts + -ExtractPath $(Build.SourcesDirectory)\artifacts\PackageArtifacts displayName: Extract Package Artifacts continueOnError: ${{ parameters.sdlContinueOnError }} - task: NuGetToolInstaller@1 diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml index ecebd0f03eb..ffda80a197b 100644 --- a/eng/common/templates/job/job.yml +++ b/eng/common/templates/job/job.yml @@ -1,33 +1,67 @@ -# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, -# and some (Microbuild) should only be applied to non-PR cases for internal builds. 
- parameters: # Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job cancelTimeoutInMinutes: '' + condition: '' - container: '' + continueOnError: false + + container: '' + dependsOn: '' + displayName: '' - pool: '' + steps: [] + + pool: '' + strategy: '' + timeoutInMinutes: '' + variables: [] + workspace: '' -# Job base template specific parameters - # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md - artifacts: '' + # Job base template specific parameters + # Optional: Enable installing Microbuild plugin + # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix + # _TeamName - the name of your team + # _SignType - 'test' or 'real' enableMicrobuild: false + + # Optional: Include PublishBuildArtifacts task enablePublishBuildArtifacts: false + + # Optional: Enable publishing to the build asset registry enablePublishBuildAssets: false - enablePublishTestResults: false + + # Optional: Prevent gather/push manifest from executing when using publishing pipelines enablePublishUsingPipelines: false + + # Optional: Include PublishTestResults task + enablePublishTestResults: false + + # Optional: enable sending telemetry + enableTelemetry: false + + # Optional: define the helix repo for telemetry (example: 'dotnet/arcade') + helixRepo: '' + + # Optional: define the helix type for telemetry (example: 'build/product/') + helixType: '' + + # Required: name of the job name: '' - preSteps: [] + + # Optional: should run as a public build even in the internal project + # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. runAsPublic: false +# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, +# and some (Microbuild) should only be applied to non-PR cases for internal builds. 
+ jobs: - job: ${{ parameters.name }} @@ -59,7 +93,7 @@ jobs: timeoutInMinutes: ${{ parameters.timeoutInMinutes }} variables: - - ${{ if ne(parameters.enableTelemetry, 'false') }}: + - ${{ if eq(parameters.enableTelemetry, 'true') }}: - name: DOTNET_CLI_TELEMETRY_PROFILE value: '$(Build.Repository.Uri)' - ${{ each variable in parameters.variables }}: @@ -91,12 +125,21 @@ jobs: workspace: ${{ parameters.workspace }} steps: - - ${{ if ne(parameters.preSteps, '') }}: - - ${{ each preStep in parameters.preSteps }}: - - ${{ preStep }} + - ${{ if eq(parameters.enableTelemetry, 'true') }}: + # Telemetry tasks are built from https://github.com/dotnet/arcade-extensions + - task: sendStartTelemetry@0 + displayName: 'Send Helix Start Telemetry' + inputs: + helixRepo: ${{ parameters.helixRepo }} + ${{ if ne(parameters.helixType, '') }}: + helixType: ${{ parameters.helixType }} + buildConfig: $(_BuildConfig) + runAsPublic: ${{ parameters.runAsPublic }} + continueOnError: ${{ parameters.continueOnError }} + condition: always() - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - task: MicroBuildSigningPlugin@2 displayName: Install MicroBuild plugin inputs: @@ -108,16 +151,9 @@ jobs: continueOnError: ${{ parameters.continueOnError }} condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - task: NuGetAuthenticate@0 - - ${{ if or(eq(parameters.artifacts.download, 'true'), ne(parameters.artifacts.download, '')) }}: - - task: DownloadPipelineArtifact@2 - inputs: - buildType: current - artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }} - targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }} - itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }} - - ${{ each step in parameters.steps }}: - ${{ step }} @@ -130,60 +166,20 @@ jobs: env: TeamName: $(_TeamName) - - ${{ if ne(parameters.artifacts.publish, '') }}: - - ${{ if or(eq(parameters.artifacts.publish.artifacts, 'true'), ne(parameters.artifacts.publish.artifacts, '')) }}: - - task: CopyFiles@2 - displayName: Gather binaries for publish to artifacts - inputs: - SourceFolder: 'artifacts/bin' - Contents: '**' - TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin' - - task: CopyFiles@2 - displayName: Gather packages for publish to artifacts - inputs: - SourceFolder: 'artifacts/packages' - Contents: '**' - TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages' - - task: PublishBuildArtifacts@1 - displayName: Publish pipeline artifacts - inputs: - PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts' - PublishLocation: Container - ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} - continueOnError: true - condition: always() - - ${{ if or(eq(parameters.artifacts.publish.logs, 'true'), ne(parameters.artifacts.publish.logs, '')) }}: - - publish: artifacts/log - artifact: ${{ 
coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }} - displayName: Publish logs - continueOnError: true - condition: always() - - ${{ if or(eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}: - - ${{ if and(ne(parameters.enablePublishUsingPipelines, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: CopyFiles@2 - displayName: Gather Asset Manifests - inputs: - SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest' - TargetFolder: '$(Build.ArtifactStagingDirectory)/AssetManifests' - continueOnError: ${{ parameters.continueOnError }} - condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true')) - - - task: PublishBuildArtifacts@1 - displayName: Push Asset Manifests - inputs: - PathtoPublish: '$(Build.ArtifactStagingDirectory)/AssetManifests' - PublishLocation: Container - ArtifactName: AssetManifests - continueOnError: ${{ parameters.continueOnError }} - condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true')) - - - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}: + - ${{ if eq(parameters.enableTelemetry, 'true') }}: + # Telemetry tasks are built from https://github.com/dotnet/arcade-extensions + - task: sendEndTelemetry@0 + displayName: 'Send Helix End Telemetry' + continueOnError: ${{ parameters.continueOnError }} + condition: always() + + - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: - task: PublishBuildArtifacts@1 displayName: Publish Logs inputs: PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)' PublishLocation: Container - ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }} + ArtifactName: $(Agent.Os)_$(Agent.JobName) continueOnError: true condition: always() diff --git a/eng/common/templates/jobs/jobs.yml b/eng/common/templates/jobs/jobs.yml index c08225a9a97..6a2f98c036f 100644 --- a/eng/common/templates/jobs/jobs.yml +++ b/eng/common/templates/jobs/jobs.yml @@ -1,10 +1,19 @@ parameters: - # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md + # Optional: 'true' if failures in job.yml job should not fail the job continueOnError: false + # Optional: Enable installing Microbuild plugin + # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix + # _TeamName - the name of your team + # _SignType - 'test' or 'real' + enableMicrobuild: false + # Optional: Include PublishBuildArtifacts task enablePublishBuildArtifacts: false + # Optional: Enable publishing to the build asset registry + enablePublishBuildAssets: false + # Optional: Enable publishing using release pipelines enablePublishUsingPipelines: false @@ -14,9 +23,19 @@ parameters: # Optional: Include toolset dependencies in the generated graph files includeToolset: false + # Optional: Include PublishTestResults task + enablePublishTestResults: false + + # Optional: enable sending telemetry + # if enabled then the 'helixRepo' parameter should also be specified + enableTelemetry: false + # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job jobs: [] + # Optional: define the helix repo for telemetry (example: 'dotnet/arcade') + helixRepo: '' + # Optional: Override automatically derived dependsOn 
value for "publish build assets" job publishBuildAssetsDependsOn: '' @@ -43,30 +62,29 @@ jobs: name: ${{ job.job }} -- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}: - - template: ../job/publish-build-assets.yml - parameters: - continueOnError: ${{ parameters.continueOnError }} - dependsOn: - - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}: - - ${{ each job in parameters.publishBuildAssetsDependsOn }}: - - ${{ job.job }} - - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}: - - ${{ each job in parameters.jobs }}: - - ${{ job.job }} - pool: - vmImage: vs2017-win2016 - runAsPublic: ${{ parameters.runAsPublic }} - publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }} - enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} - - - ${{ if eq(parameters.graphFileGeneration.enabled, true) }}: - - template: ../job/generate-graph-files.yml - parameters: - continueOnError: ${{ parameters.continueOnError }} - includeToolset: ${{ parameters.graphFileGeneration.includeToolset }} - dependsOn: - - Asset_Registry_Publish - pool: - vmImage: vs2017-win2016 +- ${{ if and(eq(parameters.enablePublishBuildAssets, true), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - template: ../job/publish-build-assets.yml + parameters: + continueOnError: ${{ parameters.continueOnError }} + dependsOn: + - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}: + - ${{ each job in parameters.publishBuildAssetsDependsOn }}: + - ${{ job.job }} + - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}: + - ${{ each job in parameters.jobs }}: + - ${{ job.job }} + pool: + vmImage: vs2017-win2016 + runAsPublic: ${{ parameters.runAsPublic }} + publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }} + enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} + +- ${{ if and(eq(parameters.graphFileGeneration.enabled, true), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - template: ../job/generate-graph-files.yml + parameters: + continueOnError: ${{ parameters.continueOnError }} + includeToolset: ${{ parameters.graphFileGeneration.includeToolset }} + dependsOn: + - Asset_Registry_Publish + pool: + vmImage: vs2017-win2016 diff --git a/eng/common/templates/post-build/channels/generic-internal-channel.yml b/eng/common/templates/post-build/channels/generic-internal-channel.yml index 3a8755fbbb7..ad9375f5e5c 100644 --- a/eng/common/templates/post-build/channels/generic-internal-channel.yml +++ b/eng/common/templates/post-build/channels/generic-internal-channel.yml @@ -1,7 +1,4 @@ parameters: - artifactsPublishingAdditionalParameters: '' - dependsOn: - - Validate publishInstallersAndChecksums: false symbolPublishingAdditionalParameters: '' stageName: '' @@ -13,14 +10,14 @@ parameters: stages: - stage: ${{ parameters.stageName }} - dependsOn: ${{ parameters.dependsOn }} + dependsOn: validate variables: - template: ../common-variables.yml displayName: ${{ parameters.channelName }} Publishing jobs: - template: ../setup-maestro-vars.yml - - job: publish_symbols + - job: displayName: Symbol Publishing dependsOn: 
setupMaestroVars condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.channelId }} )) @@ -29,6 +26,10 @@ stages: pool: vmImage: 'windows-2019' steps: + # This is necessary whenever we want to publish/restore to an AzDO private feed + - task: NuGetAuthenticate@0 + displayName: 'Authenticate to AzDO Feeds' + - task: DownloadBuildArtifacts@0 displayName: Download Blob Artifacts inputs: @@ -41,18 +42,6 @@ stages: artifactName: 'PDBArtifacts' continueOnError: true - # This is necessary whenever we want to publish/restore to an AzDO private feed - # Since sdk-task.ps1 tries to restore packages we need to do this authentication here - # otherwise it'll complain about accessing a private feed. - - task: NuGetAuthenticate@0 - displayName: 'Authenticate to AzDO Feeds' - - - task: PowerShell@2 - displayName: Enable cross-org publishing - inputs: - filePath: eng\common\enable-cross-org-publishing.ps1 - arguments: -token $(dn-bot-dnceng-artifact-feeds-rw) - - task: PowerShell@2 displayName: Publish inputs: @@ -64,18 +53,14 @@ stages: /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/' /p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt' /p:Configuration=Release - /p:PublishToMSDL=false ${{ parameters.symbolPublishingAdditionalParameters }} - - template: ../../steps/publish-logs.yml - parameters: - StageLabel: '${{ parameters.stageName }}' - JobLabel: 'SymbolPublishing' - - job: publish_assets displayName: Publish Assets dependsOn: setupMaestroVars variables: + - group: DotNet-Blob-Feed + - group: AzureDevOps-Artifact-Feeds-Pats - name: BARBuildId value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ] - name: IsStableBuild @@ -89,14 +74,13 @@ stages: inputs: buildType: current artifactName: PackageArtifacts - continueOnError: true - task: DownloadBuildArtifacts@0 displayName: Download Blob Artifacts inputs: buildType: current artifactName: BlobArtifacts - continueOnError: true + - task: DownloadBuildArtifacts@0 displayName: Download Asset Manifests inputs: @@ -140,6 +124,7 @@ stages: /p:ChecksumsAzureAccountKey=$(InternalChecksumsBlobFeedKey) /p:InstallersTargetStaticFeed=$(InternalInstallersBlobFeedUrl) /p:InstallersAzureAccountKey=$(InternalInstallersBlobFeedKey) + /p:PublishToAzureDevOpsNuGetFeeds=true /p:AzureDevOpsStaticShippingFeed='${{ parameters.shippingFeed }}' /p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)' /p:AzureDevOpsStaticTransportFeed='${{ parameters.transportFeed }}' @@ -149,11 +134,6 @@ stages: /p:PublishToMSDL=false ${{ parameters.artifactsPublishingAdditionalParameters }} - - template: ../../steps/publish-logs.yml - parameters: - StageLabel: '${{ parameters.stageName }}' - JobLabel: 'AssetsPublishing' - - template: ../../steps/promote-build.yml parameters: ChannelId: ${{ parameters.channelId }} diff --git a/eng/common/templates/post-build/channels/generic-public-channel.yml b/eng/common/templates/post-build/channels/generic-public-channel.yml index 3f572f8b12f..c4bc1897d81 100644 --- a/eng/common/templates/post-build/channels/generic-public-channel.yml +++ b/eng/common/templates/post-build/channels/generic-public-channel.yml @@ -1,7 +1,5 @@ parameters: artifactsPublishingAdditionalParameters: '' - dependsOn: - - Validate publishInstallersAndChecksums: false symbolPublishingAdditionalParameters: '' stageName: '' @@ -13,14 +11,14 @@ parameters: stages: - stage: ${{ parameters.stageName }} - dependsOn: ${{ 
parameters.dependsOn }} + dependsOn: validate variables: - template: ../common-variables.yml displayName: ${{ parameters.channelName }} Publishing jobs: - template: ../setup-maestro-vars.yml - - job: publish_symbols + - job: displayName: Symbol Publishing dependsOn: setupMaestroVars condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.channelId }} )) @@ -66,11 +64,6 @@ stages: /p:Configuration=Release ${{ parameters.symbolPublishingAdditionalParameters }} - - template: ../../steps/publish-logs.yml - parameters: - StageLabel: '${{ parameters.stageName }}' - JobLabel: 'SymbolPublishing' - - job: publish_assets displayName: Publish Assets dependsOn: setupMaestroVars @@ -141,6 +134,7 @@ stages: /p:InstallersAzureAccountKey=$(dotnetcli-storage-key) /p:ChecksumsTargetStaticFeed=$(ChecksumsBlobFeedUrl) /p:ChecksumsAzureAccountKey=$(dotnetclichecksums-storage-key) + /p:PublishToAzureDevOpsNuGetFeeds=true /p:AzureDevOpsStaticShippingFeed='${{ parameters.shippingFeed }}' /p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)' /p:AzureDevOpsStaticTransportFeed='${{ parameters.transportFeed }}' @@ -149,11 +143,6 @@ stages: /p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)' ${{ parameters.artifactsPublishingAdditionalParameters }} - - template: ../../steps/publish-logs.yml - parameters: - StageLabel: '${{ parameters.stageName }}' - JobLabel: 'AssetsPublishing' - - template: ../../steps/promote-build.yml parameters: ChannelId: ${{ parameters.channelId }} diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml index 9505cf170f0..216d043e4e3 100644 --- a/eng/common/templates/post-build/common-variables.yml +++ b/eng/common/templates/post-build/common-variables.yml @@ -4,7 +4,7 @@ variables: - group: DotNet-DotNetCli-Storage - group: DotNet-MSRC-Storage - group: Publish-Build-Assets - + # .NET Core 3.1 Dev - name: PublicDevRelease_31_Channel_Id value: 128 @@ -49,10 +49,6 @@ variables: - name: NetCore_31_Blazor_Features_Channel_Id value: 531 - # .NET Core Experimental - - name: NetCore_Experimental_Channel_Id - value: 562 - # Whether the build is internal or not - name: IsInternalBuild value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }} diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml index 8a8d84f2026..b121d77e07d 100644 --- a/eng/common/templates/post-build/post-build.yml +++ b/eng/common/templates/post-build/post-build.yml @@ -17,14 +17,11 @@ parameters: signingValidationAdditionalParameters: '' # Which stages should finish execution before post-build stages start - validateDependsOn: - - build - publishDependsOn: - - Validate + dependsOn: [build] stages: -- stage: Validate - dependsOn: ${{ parameters.validateDependsOn }} +- stage: validate + dependsOn: ${{ parameters.dependsOn }} displayName: Validate jobs: - ${{ if eq(parameters.enableNugetValidation, 'true') }}: @@ -49,28 +46,20 @@ stages: - ${{ if eq(parameters.enableSigningValidation, 'true') }}: - job: displayName: Signing Validation - variables: - - template: common-variables.yml pool: vmImage: 'windows-2019' steps: - - task: DownloadBuildArtifacts@0 - displayName: Download Package Artifacts - inputs: - buildType: current - artifactName: PackageArtifacts - # This is necessary whenever we want to publish/restore to an AzDO private feed # Since 
sdk-task.ps1 tries to restore packages we need to do this authentication here # otherwise it'll complain about accessing a private feed. - task: NuGetAuthenticate@0 displayName: 'Authenticate to AzDO Feeds' - - task: PowerShell@2 - displayName: Enable cross-org publishing + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts inputs: - filePath: eng\common\enable-cross-org-publishing.ps1 - arguments: -token $(dn-bot-dnceng-artifact-feeds-rw) + buildType: current + artifactName: PackageArtifacts - task: PowerShell@2 displayName: Validate @@ -79,13 +68,9 @@ stages: arguments: -task SigningValidation -restore -msbuildEngine dotnet /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt' + /p:Configuration=Release ${{ parameters.signingValidationAdditionalParameters }} - - template: ../steps/publish-logs.yml - parameters: - StageLabel: 'Validation' - JobLabel: 'Signing' - - ${{ if eq(parameters.enableSourceLinkValidation, 'true') }}: - job: displayName: SourceLink Validation @@ -121,7 +106,6 @@ stages: - template: \eng\common\templates\post-build\channels\generic-public-channel.yml parameters: artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - dependsOn: ${{ parameters.publishDependsOn }} publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} stageName: 'NetCore_Dev5_Publish' @@ -134,7 +118,18 @@ stages: - template: \eng\common\templates\post-build\channels\generic-public-channel.yml parameters: artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - dependsOn: ${{ parameters.publishDependsOn }} + publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} + symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} + stageName: 'NetCore_Dev31_Publish' + channelName: '.NET Core 3.1 Dev' + channelId: 128 + transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json' + shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json' + symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json' + +- template: \eng\common\templates\post-build\channels\generic-public-channel.yml + parameters: + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} stageName: 'Net_Eng_Latest_Publish' @@ -147,7 +142,6 @@ stages: - template: \eng\common\templates\post-build\channels\generic-public-channel.yml parameters: artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - dependsOn: ${{ parameters.publishDependsOn }} publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} stageName: 'Net_Eng_Validation_Publish' @@ -160,7 +154,90 @@ stages: - template: \eng\common\templates\post-build\channels\generic-public-channel.yml parameters: artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - 
dependsOn: ${{ parameters.publishDependsOn }} + publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} + symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} + stageName: 'NetCore_3_Tools_Validation_Publish' + channelName: '.NET 3 Tools - Validation' + channelId: 390 + transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json' + shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json' + symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json' + +- template: \eng\common\templates\post-build\channels\generic-public-channel.yml + parameters: + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} + publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} + symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} + stageName: 'NetCore_3_Tools_Publish' + channelName: '.NET 3 Tools' + channelId: 344 + transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json' + shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json' + symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json' + +- template: \eng\common\templates\post-build\channels\generic-public-channel.yml + parameters: + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} + publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} + symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} + stageName: 'NetCore_Release30_Publish' + channelName: '.NET Core 3.0 Release' + channelId: 19 + transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3-transport/nuget/v3/index.json' + shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3/nuget/v3/index.json' + symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3-symbols/nuget/v3/index.json' + +- template: \eng\common\templates\post-build\channels\generic-public-channel.yml + parameters: + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} + publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} + symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} + stageName: 'NetCore_Release31_Publish' + channelName: '.NET Core 3.1 Release' + channelId: 129 + transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json' + shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json' + symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json' + +- template: \eng\common\templates\post-build\channels\generic-public-channel.yml + parameters: + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} + publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} + symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} + stageName: 'NetCore_Blazor31_Features_Publish' + channelName: '.NET Core 3.1 Blazor Features' + channelId: 531 + transportFeed: 
'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-blazor/nuget/v3/index.json' + shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-blazor/nuget/v3/index.json' + symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-blazor-symbols/nuget/v3/index.json' + +- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml + parameters: + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} + publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} + symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} + stageName: 'NetCore_30_Internal_Servicing_Publishing' + channelName: '.NET Core 3.0 Internal Servicing' + channelId: 184 + transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal-transport/nuget/v3/index.json' + shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal/nuget/v3/index.json' + symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal-symbols/nuget/v3/index.json' + +- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml + parameters: + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} + publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} + symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} + stageName: 'NetCore_31_Internal_Servicing_Publishing' + channelName: '.NET Core 3.1 Internal Servicing' + channelId: 550 + transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json' + shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json' + symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json' + +- template: \eng\common\templates\post-build\channels\generic-public-channel.yml + parameters: + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} stageName: 'General_Testing_Publish' @@ -173,7 +250,6 @@ stages: - template: \eng\common\templates\post-build\channels\generic-public-channel.yml parameters: artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - dependsOn: ${{ parameters.publishDependsOn }} publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} stageName: 'NETCore_Tooling_Dev_Publishing' @@ -186,7 +262,6 @@ stages: - template: \eng\common\templates\post-build\channels\generic-public-channel.yml parameters: artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - dependsOn: ${{ parameters.publishDependsOn }} publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} stageName: 'NETCore_Tooling_Release_Publishing' @@ -196,28 +271,74 @@ stages: shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json' symbolsFeed: 
'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json' +- template: \eng\common\templates\post-build\channels\generic-public-channel.yml + parameters: + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} + publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} + symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} + stageName: 'NETCore_SDK_301xx_Publishing' + channelName: '.NET Core SDK 3.0.1xx' + channelId: 556 + transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3-transport/nuget/v3/index.json' + shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3/nuget/v3/index.json' + symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3-symbols/nuget/v3/index.json' + - template: \eng\common\templates\post-build\channels\generic-internal-channel.yml parameters: artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - dependsOn: ${{ parameters.publishDependsOn }} publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NET_Internal_Tooling_Publishing' - channelName: '.NET Internal Tooling' - channelId: 551 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal-symbols/nuget/v3/index.json' + stageName: 'NETCore_SDK_301xx_Internal_Publishing' + channelName: '.NET Core SDK 3.0.1xx Internal' + channelId: 555 + transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal-transport/nuget/v3/index.json' + shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal/nuget/v3/index.json' + symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal-symbols/nuget/v3/index.json' - template: \eng\common\templates\post-build\channels\generic-public-channel.yml parameters: artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - dependsOn: ${{ parameters.publishDependsOn }} publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NETCore_Experimental_Publishing' - channelName: '.NET Core Experimental' - channelId: 562 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental-symbols/nuget/v3/index.json' + stageName: 'NETCore_SDK_311xx_Publishing' + channelName: '.NET Core SDK 3.1.1xx' + channelId: 560 + transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json' + shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json' + symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json' + +- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml + 
parameters: + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} + publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} + symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} + stageName: 'NETCore_SDK_311xx_Internal_Publishing' + channelName: '.NET Core SDK 3.1.1xx Internal' + channelId: 559 + transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json' + shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json' + symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json' + +- template: \eng\common\templates\post-build\channels\generic-public-channel.yml + parameters: + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} + publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} + symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} + stageName: 'NETCore_SDK_312xx_Publishing' + channelName: '.NET Core SDK 3.1.2xx' + channelId: 558 + transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json' + shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json' + symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json' + +- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml + parameters: + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} + publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} + symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} + stageName: 'NETCore_SDK_312xx_Internal_Publishing' + channelName: '.NET Core SDK 3.1.2xx Internal' + channelId: 557 + transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json' + shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json' + symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json' \ No newline at end of file diff --git a/eng/common/templates/post-build/setup-maestro-vars.yml b/eng/common/templates/post-build/setup-maestro-vars.yml index 716b53f7405..56242b068e1 100644 --- a/eng/common/templates/post-build/setup-maestro-vars.yml +++ b/eng/common/templates/post-build/setup-maestro-vars.yml @@ -4,8 +4,6 @@ jobs: pool: vmImage: 'windows-2019' steps: - - checkout: none - - task: DownloadBuildArtifacts@0 displayName: Download Release Configs inputs: @@ -16,25 +14,5 @@ jobs: name: setReleaseVars displayName: Set Release Configs Vars inputs: - targetType: inline - script: | - try { - $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt - - $BarId = $Content | Select -Index 0 - - $Channels = "" - $Content | Select -Index 1 | ForEach-Object { $Channels += "$_ ," } - - $IsStableBuild = $Content | Select -Index 2 - - Write-Host "##vso[task.setvariable variable=BARBuildId;isOutput=true]$BarId" - Write-Host "##vso[task.setvariable variable=InitialChannels;isOutput=true]$Channels" - Write-Host "##vso[task.setvariable variable=IsStableBuild;isOutput=true]$IsStableBuild" - } - catch { - Write-Host $_ - Write-Host $_.Exception - Write-Host 
$_.ScriptStackTrace - exit 1 - } + filePath: $(Build.SourcesDirectory)/eng/common/post-build/setup-maestro-vars.ps1 + arguments: -ReleaseConfigsPath '$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt' diff --git a/eng/common/templates/steps/publish-logs.yml b/eng/common/templates/steps/publish-logs.yml deleted file mode 100644 index f91751fe78e..00000000000 --- a/eng/common/templates/steps/publish-logs.yml +++ /dev/null @@ -1,23 +0,0 @@ -parameters: - StageLabel: '' - JobLabel: '' - -steps: -- task: Powershell@2 - displayName: Prepare Binlogs to Upload - inputs: - targetType: inline - script: | - New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ - Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ - continueOnError: true - condition: always() - -- task: PublishBuildArtifacts@1 - displayName: Publish Logs - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs' - PublishLocation: Container - ArtifactName: PostBuildLogs - continueOnError: true - condition: always() diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml index 30becf01ea5..05df886f55f 100644 --- a/eng/common/templates/steps/send-to-helix.yml +++ b/eng/common/templates/steps/send-to-helix.yml @@ -23,7 +23,6 @@ parameters: EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget." IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set - HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting int) Creator: '' # optional -- if the build is external, use this to specify who is sending the job DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO condition: succeeded() # optional -- condition for step to execute; defaults to succeeded() @@ -56,7 +55,6 @@ steps: DotNetCliVersion: ${{ parameters.DotNetCliVersion }} EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }} WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} - HelixBaseUri: ${{ parameters.HelixBaseUri }} Creator: ${{ parameters.Creator }} SYSTEM_ACCESSTOKEN: $(System.AccessToken) condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT')) @@ -87,7 +85,6 @@ steps: DotNetCliVersion: ${{ parameters.DotNetCliVersion }} EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }} WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} - HelixBaseUri: ${{ parameters.HelixBaseUri }} Creator: ${{ parameters.Creator }} SYSTEM_ACCESSTOKEN: $(System.AccessToken) condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT')) diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 index d3a432878e2..92a053bd16b 100644 --- a/eng/common/tools.ps1 +++ b/eng/common/tools.ps1 @@ -5,7 +5,7 @@ [bool]$ci = if (Test-Path variable:ci) { $ci } else { $false } # Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names. 
-[string]$configuration = if (Test-Path variable:configuration) { $configuration } else { 'Debug' } +[string]$configuration = if (Test-Path variable:configuration) { $configuration } else { "Debug" } # Set to true to output binary log from msbuild. Note that emitting binary log slows down the build. # Binary log must be enabled on CI. @@ -24,7 +24,7 @@ [bool]$restore = if (Test-Path variable:restore) { $restore } else { $true } # Adjusts msbuild verbosity level. -[string]$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { 'minimal' } +[string]$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { "minimal" } # Set to true to reuse msbuild nodes. Recommended to not reuse on CI. [bool]$nodeReuse = if (Test-Path variable:nodeReuse) { $nodeReuse } else { !$ci } @@ -41,23 +41,21 @@ # Enable repos to use a particular version of the on-line dotnet-install scripts. # default URL: https://dot.net/v1/dotnet-install.ps1 -[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' } +[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { "v1" } # True to use global NuGet cache instead of restoring packages to repository-local directory. [bool]$useGlobalNuGetCache = if (Test-Path variable:useGlobalNuGetCache) { $useGlobalNuGetCache } else { !$ci } # An array of names of processes to stop on script exit if prepareMachine is true. -$processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @('msbuild', 'dotnet', 'vbcscompiler') } - -$disableConfigureToolsetImport = if (Test-Path variable:disableConfigureToolsetImport) { $disableConfigureToolsetImport } else { $null } +$processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @("msbuild", "dotnet", "vbcscompiler") } set-strictmode -version 2.0 -$ErrorActionPreference = 'Stop' +$ErrorActionPreference = "Stop" [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 function Create-Directory([string[]] $path) { if (!(Test-Path $path)) { - New-Item -path $path -force -itemType 'Directory' | Out-Null + New-Item -path $path -force -itemType "Directory" | Out-Null } } @@ -98,10 +96,7 @@ function Exec-Process([string]$command, [string]$commandArgs) { } } -# createSdkLocationFile parameter enables a file being generated under the toolset directory -# which writes the sdk's location into. This is only necessary for cmd --> powershell invocations -# as dot sourcing isn't possible. 
-function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) { +function InitializeDotNetCli([bool]$install) { if (Test-Path variable:global:_DotNetInstallDir) { return $global:_DotNetInstallDir } @@ -124,7 +119,7 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) { # Find the first path on %PATH% that contains the dotnet.exe if ($useInstalledDotNetCli -and (-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -eq $null)) { - $dotnetCmd = Get-Command 'dotnet.exe' -ErrorAction SilentlyContinue + $dotnetCmd = Get-Command "dotnet.exe" -ErrorAction SilentlyContinue if ($dotnetCmd -ne $null) { $env:DOTNET_INSTALL_DIR = Split-Path $dotnetCmd.Path -Parent } @@ -137,13 +132,13 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) { if ((-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -ne $null) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) { $dotnetRoot = $env:DOTNET_INSTALL_DIR } else { - $dotnetRoot = Join-Path $RepoRoot '.dotnet' + $dotnetRoot = Join-Path $RepoRoot ".dotnet" if (-not (Test-Path(Join-Path $dotnetRoot "sdk\$dotnetSdkVersion"))) { if ($install) { InstallDotNetSdk $dotnetRoot $dotnetSdkVersion } else { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to find dotnet with SDK version '$dotnetSdkVersion'" + Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Unable to find dotnet with SDK version '$dotnetSdkVersion'" ExitWithExitCode 1 } } @@ -151,24 +146,6 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) { $env:DOTNET_INSTALL_DIR = $dotnetRoot } - # Creates a temporary file under the toolset dir. - # The following code block is protecting against concurrent access so that this function can - # be called in parallel. - if ($createSdkLocationFile) { - do { - $sdkCacheFileTemp = Join-Path $ToolsetDir $([System.IO.Path]::GetRandomFileName()) - } - until (!(Test-Path $sdkCacheFileTemp)) - Set-Content -Path $sdkCacheFileTemp -Value $dotnetRoot - - try { - Rename-Item -Force -Path $sdkCacheFileTemp 'sdk.txt' - } catch { - # Somebody beat us - Remove-Item -Path $sdkCacheFileTemp - } - } - # Add dotnet to PATH. This prevents any bare invocation of dotnet in custom # build steps from using anything other than what we've downloaded. # It also ensures that VS msbuild will use the downloaded sdk targets. 
@@ -177,6 +154,15 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) { # Make Sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build Write-PipelinePrependPath -Path $dotnetRoot + # Work around issues with Azure Artifacts credential provider + # https://github.com/dotnet/arcade/issues/3932 + if ($ci) { + $env:NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS = 20 + $env:NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS = 20 + Write-PipelineSetVariable -Name 'NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS' -Value '20' + Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20' + } + Write-PipelineSetVariable -Name 'DOTNET_MULTILEVEL_LOOKUP' -Value '0' Write-PipelineSetVariable -Name 'DOTNET_SKIP_FIRST_TIME_EXPERIENCE' -Value '1' @@ -184,7 +170,7 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) { } function GetDotNetInstallScript([string] $dotnetRoot) { - $installScript = Join-Path $dotnetRoot 'dotnet-install.ps1' + $installScript = Join-Path $dotnetRoot "dotnet-install.ps1" if (!(Test-Path $installScript)) { Create-Directory $dotnetRoot $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit @@ -194,17 +180,17 @@ function GetDotNetInstallScript([string] $dotnetRoot) { return $installScript } -function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = '') { +function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = "") { InstallDotNet $dotnetRoot $version $architecture } function InstallDotNet([string] $dotnetRoot, [string] $version, - [string] $architecture = '', - [string] $runtime = '', + [string] $architecture = "", + [string] $runtime = "", [bool] $skipNonVersionedFiles = $false, - [string] $runtimeSourceFeed = '', - [string] $runtimeSourceFeedKey = '') { + [string] $runtimeSourceFeed = "", + [string] $runtimeSourceFeedKey = "") { $installScript = GetDotNetInstallScript $dotnetRoot $installParameters = @{ @@ -220,7 +206,7 @@ function InstallDotNet([string] $dotnetRoot, & $installScript @installParameters } catch { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install dotnet runtime '$runtime' from public location." + Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Failed to install dotnet runtime '$runtime' from public location." # Only the runtime can be installed from a custom [private] location. if ($runtime -and ($runtimeSourceFeed -or $runtimeSourceFeedKey)) { @@ -236,7 +222,7 @@ function InstallDotNet([string] $dotnetRoot, & $installScript @installParameters } catch { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install dotnet runtime '$runtime' from custom location '$runtimeSourceFeed'." + Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Failed to install dotnet runtime '$runtime' from custom location '$runtimeSourceFeed'." ExitWithExitCode 1 } } else { @@ -262,16 +248,16 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = } if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs } - $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { '15.9' } + $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { "15.9" } $vsMinVersion = [Version]::new($vsMinVersionStr) # Try msbuild command available in the environment. 
if ($env:VSINSTALLDIR -ne $null) { - $msbuildCmd = Get-Command 'msbuild.exe' -ErrorAction SilentlyContinue + $msbuildCmd = Get-Command "msbuild.exe" -ErrorAction SilentlyContinue if ($msbuildCmd -ne $null) { # Workaround for https://github.com/dotnet/roslyn/issues/35793 # Due to this issue $msbuildCmd.Version returns 0.0.0.0 for msbuild.exe 16.2+ - $msbuildVersion = [Version]::new((Get-Item $msbuildCmd.Path).VersionInfo.ProductVersion.Split([char[]]@('-', '+'))[0]) + $msbuildVersion = [Version]::new((Get-Item $msbuildCmd.Path).VersionInfo.ProductVersion.Split(@('-', '+'))[0]) if ($msbuildVersion -ge $vsMinVersion) { return $global:_MSBuildExe = $msbuildCmd.Path @@ -291,20 +277,17 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = InitializeVisualStudioEnvironmentVariables $vsInstallDir $vsMajorVersion } else { - if (Get-Member -InputObject $GlobalJson.tools -Name 'xcopy-msbuild') { + if (Get-Member -InputObject $GlobalJson.tools -Name "xcopy-msbuild") { $xcopyMSBuildVersion = $GlobalJson.tools.'xcopy-msbuild' $vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0] } else { $vsMajorVersion = $vsMinVersion.Major $xcopyMSBuildVersion = "$vsMajorVersion.$($vsMinVersion.Minor).0-alpha" } - - $vsInstallDir = $null - if ($xcopyMSBuildVersion.Trim() -ine "none") { - $vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install - } + + $vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install if ($vsInstallDir -eq $null) { - throw 'Unable to find Visual Studio that has required version and components installed' + throw "Unable to find Visual Studio that has required version and components installed" } } @@ -328,7 +311,7 @@ function InstallXCopyMSBuild([string]$packageVersion) { } function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) { - $packageName = 'RoslynTools.MSBuild' + $packageName = "RoslynTools.MSBuild" $packageDir = Join-Path $ToolsDir "msbuild\$packageVersion" $packagePath = Join-Path $packageDir "$packageName.$packageVersion.nupkg" @@ -344,7 +327,7 @@ function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) { Unzip $packagePath $packageDir } - return Join-Path $packageDir 'tools' + return Join-Path $packageDir "tools" } # @@ -361,32 +344,32 @@ function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) { # or $null if no instance meeting the requirements is found on the machine. 
# function LocateVisualStudio([object]$vsRequirements = $null){ - if (Get-Member -InputObject $GlobalJson.tools -Name 'vswhere') { + if (Get-Member -InputObject $GlobalJson.tools -Name "vswhere") { $vswhereVersion = $GlobalJson.tools.vswhere } else { - $vswhereVersion = '2.5.2' + $vswhereVersion = "2.5.2" } $vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion" - $vsWhereExe = Join-Path $vsWhereDir 'vswhere.exe' + $vsWhereExe = Join-Path $vsWhereDir "vswhere.exe" if (!(Test-Path $vsWhereExe)) { Create-Directory $vsWhereDir - Write-Host 'Downloading vswhere' + Write-Host "Downloading vswhere" Invoke-WebRequest "https://github.com/Microsoft/vswhere/releases/download/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe } if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs } - $args = @('-latest', '-prerelease', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*') + $args = @("-latest", "-prerelease", "-format", "json", "-requires", "Microsoft.Component.MSBuild", "-products", "*") - if (Get-Member -InputObject $vsRequirements -Name 'version') { - $args += '-version' + if (Get-Member -InputObject $vsRequirements -Name "version") { + $args += "-version" $args += $vsRequirements.version } - if (Get-Member -InputObject $vsRequirements -Name 'components') { + if (Get-Member -InputObject $vsRequirements -Name "components") { foreach ($component in $vsRequirements.components) { - $args += '-requires' + $args += "-requires" $args += $component } } @@ -412,27 +395,27 @@ function InitializeBuildTool() { # Initialize dotnet cli if listed in 'tools' $dotnetRoot = $null - if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') { + if (Get-Member -InputObject $GlobalJson.tools -Name "dotnet") { $dotnetRoot = InitializeDotNetCli -install:$restore } - if ($msbuildEngine -eq 'dotnet') { + if ($msbuildEngine -eq "dotnet") { if (!$dotnetRoot) { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "/global.json must specify 'tools.dotnet'." + Write-PipelineTelemetryError -Category "InitializeToolset" -Message "/global.json must specify 'tools.dotnet'." ExitWithExitCode 1 } - $buildTool = @{ Path = Join-Path $dotnetRoot 'dotnet.exe'; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'netcoreapp2.1' } + $buildTool = @{ Path = Join-Path $dotnetRoot "dotnet.exe"; Command = "msbuild"; Tool = "dotnet"; Framework = "netcoreapp2.1" } } elseif ($msbuildEngine -eq "vs") { try { $msbuildPath = InitializeVisualStudioMSBuild -install:$restore } catch { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ + Write-PipelineTelemetryError -Category "InitializeToolset" -Message $_ ExitWithExitCode 1 } $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472" } } else { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'." + Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'." ExitWithExitCode 1 } @@ -441,15 +424,15 @@ function InitializeBuildTool() { function GetDefaultMSBuildEngine() { # Presence of tools.vs indicates the repo needs to build using VS msbuild on Windows. 
- if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') { - return 'vs' + if (Get-Member -InputObject $GlobalJson.tools -Name "vs") { + return "vs" } - if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') { - return 'dotnet' + if (Get-Member -InputObject $GlobalJson.tools -Name "dotnet") { + return "dotnet" } - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'." + Write-PipelineTelemetryError -Category "InitializeToolset" -Message "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'." ExitWithExitCode 1 } @@ -458,9 +441,9 @@ function GetNuGetPackageCachePath() { # Use local cache on CI to ensure deterministic build, # use global cache in dev builds to avoid cost of downloading packages. if ($useGlobalNuGetCache) { - $env:NUGET_PACKAGES = Join-Path $env:UserProfile '.nuget\packages' + $env:NUGET_PACKAGES = Join-Path $env:UserProfile ".nuget\packages" } else { - $env:NUGET_PACKAGES = Join-Path $RepoRoot '.packages' + $env:NUGET_PACKAGES = Join-Path $RepoRoot ".packages" } } @@ -473,7 +456,7 @@ function GetSdkTaskProject([string]$taskName) { } function InitializeNativeTools() { - if (-Not (Test-Path variable:DisableNativeToolsetInstalls) -And (Get-Member -InputObject $GlobalJson -Name "native-tools")) { + if (Get-Member -InputObject $GlobalJson -Name "native-tools") { $nativeArgs= @{} if ($ci) { $nativeArgs = @{ @@ -502,14 +485,14 @@ function InitializeToolset() { } if (-not $restore) { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Toolset version $toolsetVersion has not been restored." + Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Toolset version $toolsetVersion has not been restored." ExitWithExitCode 1 } $buildTool = InitializeBuildTool - $proj = Join-Path $ToolsetDir 'restore.proj' - $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'ToolsetRestore.binlog') } else { '' } + $proj = Join-Path $ToolsetDir "restore.proj" + $bl = if ($binaryLog) { "/bl:" + (Join-Path $LogDir "ToolsetRestore.binlog") } else { "" } '' | Set-Content $proj @@ -531,7 +514,7 @@ function ExitWithExitCode([int] $exitCode) { } function Stop-Processes() { - Write-Host 'Killing running build processes...' + Write-Host "Killing running build processes..." 
foreach ($processName in $processesToStopOnExit) { Get-Process -Name $processName -ErrorAction SilentlyContinue | Stop-Process } @@ -548,18 +531,13 @@ function MSBuild() { # Work around issues with Azure Artifacts credential provider # https://github.com/dotnet/arcade/issues/3932 - if ($ci -and $buildTool.Tool -eq 'dotnet') { + if ($ci -and $buildTool.Tool -eq "dotnet") { dotnet nuget locals http-cache -c - - $env:NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS = 20 - $env:NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS = 20 - Write-PipelineSetVariable -Name 'NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS' -Value '20' - Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20' } $toolsetBuildProject = InitializeToolset $path = Split-Path -parent $toolsetBuildProject - $path = Join-Path $path (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll') + $path = Join-Path $path (Join-Path $buildTool.Framework "Microsoft.DotNet.Arcade.Sdk.dll") $args += "/logger:$path" } @@ -574,12 +552,12 @@ function MSBuild() { function MSBuild-Core() { if ($ci) { if (!$binaryLog) { - Write-PipelineTelemetryError -Category 'Build' -Message 'Binary log must be enabled in CI build.' + Write-PipelineTaskError -Message "Binary log must be enabled in CI build." ExitWithExitCode 1 } if ($nodeReuse) { - Write-PipelineTelemetryError -Category 'Build' -Message 'Node reuse must be disabled in CI build.' + Write-PipelineTaskError -Message "Node reuse must be disabled in CI build." ExitWithExitCode 1 } } @@ -589,10 +567,10 @@ function MSBuild-Core() { $cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci" if ($warnAsError) { - $cmdArgs += ' /warnaserror /p:TreatWarningsAsErrors=true' + $cmdArgs += " /warnaserror /p:TreatWarningsAsErrors=true" } else { - $cmdArgs += ' /p:TreatWarningsAsErrors=false' + $cmdArgs += " /p:TreatWarningsAsErrors=false" } foreach ($arg in $args) { @@ -604,7 +582,7 @@ function MSBuild-Core() { $exitCode = Exec-Process $buildTool.Path $cmdArgs if ($exitCode -ne 0) { - Write-PipelineTelemetryError Category 'Build' -Message 'Build failed.' + Write-PipelineTaskError -Message "Build failed." $buildLog = GetMSBuildBinaryLogCommandLineArgument $args if ($buildLog -ne $null) { @@ -619,12 +597,12 @@ function GetMSBuildBinaryLogCommandLineArgument($arguments) { foreach ($argument in $arguments) { if ($argument -ne $null) { $arg = $argument.Trim() - if ($arg.StartsWith('/bl:', "OrdinalIgnoreCase")) { - return $arg.Substring('/bl:'.Length) + if ($arg.StartsWith("/bl:", "OrdinalIgnoreCase")) { + return $arg.Substring("/bl:".Length) } - if ($arg.StartsWith('/binaryLogger:', 'OrdinalIgnoreCase')) { - return $arg.Substring('/binaryLogger:'.Length) + if ($arg.StartsWith("/binaryLogger:", "OrdinalIgnoreCase")) { + return $arg.Substring("/binaryLogger:".Length) } } } @@ -634,14 +612,14 @@ function GetMSBuildBinaryLogCommandLineArgument($arguments) { . 
$PSScriptRoot\pipeline-logging-functions.ps1 -$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..') -$EngRoot = Resolve-Path (Join-Path $PSScriptRoot '..') -$ArtifactsDir = Join-Path $RepoRoot 'artifacts' -$ToolsetDir = Join-Path $ArtifactsDir 'toolset' -$ToolsDir = Join-Path $RepoRoot '.tools' -$LogDir = Join-Path (Join-Path $ArtifactsDir 'log') $configuration -$TempDir = Join-Path (Join-Path $ArtifactsDir 'tmp') $configuration -$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot 'global.json') | ConvertFrom-Json +$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot "..\..") +$EngRoot = Resolve-Path (Join-Path $PSScriptRoot "..") +$ArtifactsDir = Join-Path $RepoRoot "artifacts" +$ToolsetDir = Join-Path $ArtifactsDir "toolset" +$ToolsDir = Join-Path $RepoRoot ".tools" +$LogDir = Join-Path (Join-Path $ArtifactsDir "log") $configuration +$TempDir = Join-Path (Join-Path $ArtifactsDir "tmp") $configuration +$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot "global.json") | ConvertFrom-Json # true if global.json contains a "runtimes" section $globalJsonHasRuntimes = if ($GlobalJson.tools.PSObject.Properties.Name -Match 'runtimes') { $true } else { $false } @@ -654,18 +632,3 @@ Write-PipelineSetVariable -Name 'Artifacts.Toolset' -Value $ToolsetDir Write-PipelineSetVariable -Name 'Artifacts.Log' -Value $LogDir Write-PipelineSetVariable -Name 'TEMP' -Value $TempDir Write-PipelineSetVariable -Name 'TMP' -Value $TempDir - -# Import custom tools configuration, if present in the repo. -# Note: Import in global scope so that the script set top-level variables without qualification. -if (!$disableConfigureToolsetImport) { - $configureToolsetScript = Join-Path $EngRoot 'configure-toolset.ps1' - if (Test-Path $configureToolsetScript) { - . $configureToolsetScript - if ((Test-Path variable:failOnConfigureToolsetError) -And $failOnConfigureToolsetError) { - if ((Test-Path variable:LastExitCode) -And ($LastExitCode -ne 0)) { - Write-PipelineTelemetryError -Category 'Build' -Message 'configure-toolset.ps1 returned a non-zero exit code' - ExitWithExitCode $LastExitCode - } - } - } -} diff --git a/eng/common/tools.sh b/eng/common/tools.sh index e071af4ee49..94965a8fd2a 100755 --- a/eng/common/tools.sh +++ b/eng/common/tools.sh @@ -81,7 +81,7 @@ function ReadGlobalVersion { local pattern="\"$key\" *: *\"(.*)\"" if [[ ! $line =~ $pattern ]]; then - Write-PipelineTelemetryError -category 'Build' "Error: Cannot find \"$key\" in $global_json_file" + Write-PipelineTelemetryError -category 'InitializeToolset' "Error: Cannot find \"$key\" in $global_json_file" ExitWithExitCode 1 fi @@ -152,6 +152,15 @@ function InitializeDotNetCli { # build steps from using anything other than what we've downloaded. 
Write-PipelinePrependPath -path "$dotnet_root" + # Work around issues with Azure Artifacts credential provider + # https://github.com/dotnet/arcade/issues/3932 + if [[ "$ci" == true ]]; then + export NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS=20 + export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20 + Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20" + Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20" + fi + Write-PipelineSetVariable -name "DOTNET_MULTILEVEL_LOOKUP" -value "0" Write-PipelineSetVariable -name "DOTNET_SKIP_FIRST_TIME_EXPERIENCE" -value "1" @@ -274,9 +283,6 @@ function GetNuGetPackageCachePath { } function InitializeNativeTools() { - if [[ -n "${DisableNativeToolsetInstalls:-}" ]]; then - return - fi if grep -Fq "native-tools" $global_json_file then local nativeArgs="" @@ -326,7 +332,7 @@ function InitializeToolset { local toolset_build_proj=`cat "$toolset_location_file"` if [[ ! -a "$toolset_build_proj" ]]; then - Write-PipelineTelemetryError -category 'Build' "Invalid toolset path: $toolset_build_proj" + Write-PipelineTelemetryError -category 'InitializeToolset' "Invalid toolset path: $toolset_build_proj" ExitWithExitCode 3 fi @@ -357,12 +363,7 @@ function MSBuild { # Work around issues with Azure Artifacts credential provider # https://github.com/dotnet/arcade/issues/3932 if [[ "$ci" == true ]]; then - "$_InitializeBuildTool" nuget locals http-cache -c - - export NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS=20 - export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20 - Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20" - Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20" + dotnet nuget locals http-cache -c fi local toolset_dir="${_InitializeToolset%/*}" @@ -376,12 +377,12 @@ function MSBuild { function MSBuild-Core { if [[ "$ci" == true ]]; then if [[ "$binary_log" != true ]]; then - Write-PipelineTelemetryError -category 'Build' "Binary log must be enabled in CI build." + Write-PipelineTaskError "Binary log must be enabled in CI build." ExitWithExitCode 1 fi if [[ "$node_reuse" == true ]]; then - Write-PipelineTelemetryError -category 'Build' "Node reuse must be disabled in CI build." + Write-PipelineTaskError "Node reuse must be disabled in CI build." ExitWithExitCode 1 fi fi @@ -395,7 +396,7 @@ function MSBuild-Core { "$_InitializeBuildTool" "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@" || { local exit_code=$? - Write-PipelineTelemetryError -category 'Build' "Build failed (exit code '$exit_code')." + Write-PipelineTaskError "Build failed (exit code '$exit_code')." ExitWithExitCode $exit_code } } @@ -436,18 +437,3 @@ Write-PipelineSetVariable -name "Artifacts.Toolset" -value "$toolset_dir" Write-PipelineSetVariable -name "Artifacts.Log" -value "$log_dir" Write-PipelineSetVariable -name "Temp" -value "$temp_dir" Write-PipelineSetVariable -name "TMP" -value "$temp_dir" - -# Import custom tools configuration, if present in the repo. -if [ -z "${disable_configure_toolset_import:-}" ]; then - configure_toolset_script="$eng_root/configure-toolset.sh" - if [[ -a "$configure_toolset_script" ]]; then - . "$configure_toolset_script" - fi -fi - -# TODO: https://github.com/dotnet/arcade/issues/1468 -# Temporary workaround to avoid breaking change. -# Remove once repos are updated. 
-if [[ -n "${useInstalledDotNetCli:-}" ]]; then - use_installed_dotnet_cli="$useInstalledDotNetCli" -fi diff --git a/fcs/build.fsx b/fcs/build.fsx index 1c2528dd2e0..af21298e7ab 100644 --- a/fcs/build.fsx +++ b/fcs/build.fsx @@ -66,16 +66,16 @@ Target.create "Build" (fun _ -> runDotnet __SOURCE_DIRECTORY__ "build" "../src/buildtools/buildtools.proj -v n -c Proto" let fslexPath = __SOURCE_DIRECTORY__ + "/../artifacts/bin/fslex/Proto/netcoreapp2.1/fslex.dll" let fsyaccPath = __SOURCE_DIRECTORY__ + "/../artifacts/bin/fsyacc/Proto/netcoreapp2.1/fsyacc.dll" - runDotnet __SOURCE_DIRECTORY__ "build" (sprintf "FSharp.Compiler.Service.sln -v n -c Release /p:FsLexPath=%s /p:FsYaccPath=%s" fslexPath fsyaccPath) + runDotnet __SOURCE_DIRECTORY__ "build" (sprintf "FSharp.Compiler.Service.sln -nodereuse:false -v n -c Release /p:DisableCompilerRedirection=true /p:FsLexPath=%s /p:FsYaccPath=%s" fslexPath fsyaccPath) ) Target.create "Test" (fun _ -> // This project file is used for the netcoreapp2.0 tests to work out reference sets - runDotnet __SOURCE_DIRECTORY__ "build" "../tests/projects/Sample_NETCoreSDK_FSharp_Library_netstandard2_0/Sample_NETCoreSDK_FSharp_Library_netstandard2_0.fsproj -v n /restore /p:DisableCompilerRedirection=true" + runDotnet __SOURCE_DIRECTORY__ "build" "../tests/projects/Sample_NETCoreSDK_FSharp_Library_netstandard2_0/Sample_NETCoreSDK_FSharp_Library_netstandard2_0.fsproj -nodereuse:false -v n /restore /p:DisableCompilerRedirection=true" // Now run the tests let logFilePath = Path.Combine(__SOURCE_DIRECTORY__, "..", "artifacts", "TestResults", "Release", "FSharp.Compiler.Service.Test.xml") - runDotnet __SOURCE_DIRECTORY__ "test" (sprintf "FSharp.Compiler.Service.Tests/FSharp.Compiler.Service.Tests.fsproj --no-restore --no-build -v n -c Release --test-adapter-path . --logger \"nunit;LogFilePath=%s\"" logFilePath) + runDotnet __SOURCE_DIRECTORY__ "test" (sprintf "FSharp.Compiler.Service.Tests/FSharp.Compiler.Service.Tests.fsproj --no-restore --no-build -nodereuse:false -v n -c Release --test-adapter-path . --logger \"nunit;LogFilePath=%s\"" logFilePath) ) Target.create "NuGet" (fun _ -> diff --git a/global.json b/global.json index 699ccb65f85..f12de763877 100644 --- a/global.json +++ b/global.json @@ -10,7 +10,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "5.0.0-beta.19627.1", + "Microsoft.DotNet.Arcade.Sdk": "1.0.0-beta.19616.5", "Microsoft.DotNet.Helix.Sdk": "2.0.0-beta.19069.2" } } diff --git a/src/absil/ilascii.fs b/src/absil/ilascii.fs index 4bd48cb59b3..e5b7782ba7e 100644 --- a/src/absil/ilascii.fs +++ b/src/absil/ilascii.fs @@ -9,7 +9,7 @@ open FSharp.Compiler.AbstractIL.IL // set to the proper value at CompileOps.fs (BuildFrameworkTcImports) // Only relevant when compiling FSharp.Core.dll -let parseILGlobals = ref EcmaMscorlibILGlobals +let mutable parseILGlobals = EcmaMscorlibILGlobals /// Table of parsing and pretty printing data for instructions. 
let noArgInstrs = diff --git a/src/absil/ilascii.fsi b/src/absil/ilascii.fsi index 3a8543e4e1c..67a5c9eb4b1 100644 --- a/src/absil/ilascii.fsi +++ b/src/absil/ilascii.fsi @@ -14,7 +14,7 @@ open FSharp.Compiler.AbstractIL.IL // IL Parser state - must be initialized before parsing a module // -------------------------------------------------------------------- -val parseILGlobals: ILGlobals ref +val mutable parseILGlobals: ILGlobals // -------------------------------------------------------------------- // IL Lexer and pretty-printer tables diff --git a/src/absil/ildiag.fs b/src/absil/ildiag.fs index 1cd20ad8750..d43bdf8dca4 100644 --- a/src/absil/ildiag.fs +++ b/src/absil/ildiag.fs @@ -5,18 +5,18 @@ module internal FSharp.Compiler.AbstractIL.Diagnostics -let diagnosticsLog = ref (Some stdout) +let mutable diagnosticsLog = Some stdout -let setDiagnosticsChannel s = diagnosticsLog := s +let setDiagnosticsChannel s = diagnosticsLog <- s -let dflushn () = match !diagnosticsLog with None -> () | Some d -> d.WriteLine(); d.Flush() -let dflush () = match !diagnosticsLog with None -> () | Some d -> d.Flush() +let dflushn () = match diagnosticsLog with None -> () | Some d -> d.WriteLine(); d.Flush() +let dflush () = match diagnosticsLog with None -> () | Some d -> d.Flush() let dprintn (s:string) = - match !diagnosticsLog with None -> () | Some d -> d.Write s; d.Write "\n"; dflush() + match diagnosticsLog with None -> () | Some d -> d.Write s; d.Write "\n"; dflush() let dprintf (fmt: Format<_,_,_,_>) = - Printf.kfprintf dflush (match !diagnosticsLog with None -> System.IO.TextWriter.Null | Some d -> d) fmt + Printf.kfprintf dflush (match diagnosticsLog with None -> System.IO.TextWriter.Null | Some d -> d) fmt let dprintfn (fmt: Format<_,_,_,_>) = - Printf.kfprintf dflushn (match !diagnosticsLog with None -> System.IO.TextWriter.Null | Some d -> d) fmt + Printf.kfprintf dflushn (match diagnosticsLog with None -> System.IO.TextWriter.Null | Some d -> d) fmt diff --git a/src/absil/illib.fs b/src/absil/illib.fs index 9a84e94e34a..33e0566a6c2 100644 --- a/src/absil/illib.fs +++ b/src/absil/illib.fs @@ -47,15 +47,15 @@ let LOH_SIZE_THRESHOLD_BYTES = 80_000 // Library: ReportTime //--------------------------------------------------------------------- let reportTime = - let tFirst = ref None - let tPrev = ref None + let mutable tFirst = None + let mutable tPrev = None fun showTimes descr -> if showTimes then let t = Process.GetCurrentProcess().UserProcessorTime.TotalSeconds - let prev = match !tPrev with None -> 0.0 | Some t -> t - let first = match !tFirst with None -> (tFirst := Some t; t) | Some t -> t + let prev = match tPrev with None -> 0.0 | Some t -> t + let first = match tFirst with None -> (tFirst <- Some t; t) | Some t -> t printf "ilwrite: TIME %10.3f (total) %10.3f (delta) - %s\n" (t - first) (t - prev) descr - tPrev := Some t + tPrev <- Some t //------------------------------------------------------------------------- // Library: projections @@ -573,10 +573,10 @@ module String = let getLines (str: string) = use reader = new StringReader(str) [| - let line = ref (reader.ReadLine()) - while not (isNull !line) do - yield !line - line := reader.ReadLine() + let mutable line = reader.ReadLine() + while not (isNull line) do + yield line + line <- reader.ReadLine() if str.EndsWithOrdinal("\n") then // last trailing space not returned // http://stackoverflow.com/questions/19365404/stringreader-omits-trailing-linebreak diff --git a/src/absil/ilpars.fsy b/src/absil/ilpars.fsy index
849c5f9fd13..e831677e50a 100644 --- a/src/absil/ilpars.fsy +++ b/src/absil/ilpars.fsy @@ -33,7 +33,7 @@ let resolveCurrentMethodSpecScope obj = let findSystemRuntimeAssemblyRef() = - match (!parseILGlobals).primaryAssemblyScopeRef with + match parseILGlobals.primaryAssemblyScopeRef with | ILScopeRef.Assembly aref -> aref | _ -> pfailwith "systemRuntimeScopeRef not set to valid assembly reference in parseILGlobals" @@ -235,9 +235,9 @@ callKind: *---------------------------------------------*/ typ: STRING - { noMethodSpecScope (!parseILGlobals).typ_String } + { noMethodSpecScope parseILGlobals.typ_String } | OBJECT - { noMethodSpecScope (!parseILGlobals).typ_Object } + { noMethodSpecScope parseILGlobals.typ_Object } | CLASS typeNameInst { resolveMethodSpecScopeThen $2 (fun tspec -> noMethodSpecScope (mkILBoxedType tspec)) } @@ -256,45 +256,45 @@ typ: STRING | typ STAR { resolveMethodSpecScopeThen $1 (fun ty -> noMethodSpecScope (ILType.Ptr ty)) } | CHAR - { noMethodSpecScope (!parseILGlobals).typ_Char } + { noMethodSpecScope parseILGlobals.typ_Char } | VOID { noMethodSpecScope ILType.Void } | BOOL - { noMethodSpecScope (!parseILGlobals).typ_Bool } + { noMethodSpecScope parseILGlobals.typ_Bool } | INT8 - { noMethodSpecScope (!parseILGlobals).typ_SByte } + { noMethodSpecScope parseILGlobals.typ_SByte } | INT16 - { noMethodSpecScope (!parseILGlobals).typ_Int16 } + { noMethodSpecScope parseILGlobals.typ_Int16 } | INT32 - { noMethodSpecScope (!parseILGlobals).typ_Int32 } + { noMethodSpecScope parseILGlobals.typ_Int32 } | INT64 - { noMethodSpecScope (!parseILGlobals).typ_Int64 } + { noMethodSpecScope parseILGlobals.typ_Int64 } | FLOAT32 - { noMethodSpecScope (!parseILGlobals).typ_Single } + { noMethodSpecScope parseILGlobals.typ_Single } | FLOAT64 - { noMethodSpecScope (!parseILGlobals).typ_Double } + { noMethodSpecScope parseILGlobals.typ_Double } | UNSIGNED INT8 - { noMethodSpecScope (!parseILGlobals).typ_Byte } + { noMethodSpecScope parseILGlobals.typ_Byte } | UNSIGNED INT16 - { noMethodSpecScope (!parseILGlobals).typ_UInt16 } + { noMethodSpecScope parseILGlobals.typ_UInt16 } | UNSIGNED INT32 - { noMethodSpecScope (!parseILGlobals).typ_UInt32 } + { noMethodSpecScope parseILGlobals.typ_UInt32 } | UNSIGNED INT64 - { noMethodSpecScope (!parseILGlobals).typ_UInt64 } + { noMethodSpecScope parseILGlobals.typ_UInt64 } | UINT8 - { noMethodSpecScope (!parseILGlobals).typ_Byte } + { noMethodSpecScope parseILGlobals.typ_Byte } | UINT16 - { noMethodSpecScope (!parseILGlobals).typ_UInt16 } + { noMethodSpecScope parseILGlobals.typ_UInt16 } | UINT32 - { noMethodSpecScope (!parseILGlobals).typ_UInt32 } + { noMethodSpecScope parseILGlobals.typ_UInt32 } | UINT64 - { noMethodSpecScope (!parseILGlobals).typ_UInt64 } + { noMethodSpecScope parseILGlobals.typ_UInt64 } | NATIVE INT - { noMethodSpecScope (!parseILGlobals).typ_IntPtr } + { noMethodSpecScope parseILGlobals.typ_IntPtr } | NATIVE UNSIGNED INT - { noMethodSpecScope (!parseILGlobals).typ_UIntPtr } + { noMethodSpecScope parseILGlobals.typ_UIntPtr } | NATIVE UINT - { noMethodSpecScope (!parseILGlobals).typ_UIntPtr } + { noMethodSpecScope parseILGlobals.typ_UIntPtr } | BANG int32 { noMethodSpecScope (ILType.TypeVar (uint16 ( $2))) } diff --git a/src/absil/ilread.fs b/src/absil/ilread.fs index fed858c4d50..aec47ac3be4 100644 --- a/src/absil/ilread.fs +++ b/src/absil/ilread.fs @@ -522,8 +522,8 @@ let instrs () = // The tables are delayed to avoid building them unnecessarily at startup // Many applications of AbsIL (e.g. 
a compiler) don't need to read instructions. -let oneByteInstrs = ref None -let twoByteInstrs = ref None +let mutable oneByteInstrs = None +let mutable twoByteInstrs = None let fillInstrs () = let oneByteInstrTable = Array.create 256 I_invalid_instr let twoByteInstrTable = Array.create 256 I_invalid_instr @@ -542,16 +542,16 @@ let fillInstrs () = oneByteInstrTable.[i] <- f List.iter addInstr (instrs()) List.iter (fun (x, mk) -> addInstr (x, I_none_instr (noPrefixes mk))) (noArgInstrs.Force()) - oneByteInstrs := Some oneByteInstrTable - twoByteInstrs := Some twoByteInstrTable + oneByteInstrs <- Some oneByteInstrTable + twoByteInstrs <- Some twoByteInstrTable let rec getOneByteInstr i = - match !oneByteInstrs with + match oneByteInstrs with | None -> fillInstrs(); getOneByteInstr i | Some t -> t.[i] let rec getTwoByteInstr i = - match !twoByteInstrs with + match twoByteInstrs with | None -> fillInstrs(); getTwoByteInstr i | Some t -> t.[i] @@ -680,20 +680,20 @@ type GenericParamsIdx = GenericParamsIdx of int * TypeOrMethodDefTag * int let mkCacheInt32 lowMem _inbase _nm _sz = if lowMem then (fun f x -> f x) else - let cache = ref null - let count = ref 0 + let mutable cache = null + let mutable count = 0 #if STATISTICS addReport (fun oc -> if !count <> 0 then oc.WriteLine ((_inbase + string !count + " "+ _nm + " cache hits"): string)) #endif fun f (idx: int32) -> let cache = - match !cache with - | null -> cache := new Dictionary(11) + match cache with + | null -> cache <- new Dictionary(11) | _ -> () - !cache + cache match cache.TryGetValue idx with | true, res -> - incr count + count <- count + 1 res | _ -> let res = f idx @@ -702,20 +702,20 @@ let mkCacheInt32 lowMem _inbase _nm _sz = let mkCacheGeneric lowMem _inbase _nm _sz = if lowMem then (fun f x -> f x) else - let cache = ref null - let count = ref 0 + let mutable cache = null + let mutable count = 0 #if STATISTICS addReport (fun oc -> if !count <> 0 then oc.WriteLine ((_inbase + string !count + " " + _nm + " cache hits"): string)) #endif fun f (idx :'T) -> let cache = - match !cache with - | null -> cache := new Dictionary<_, _>(11 (* sz: int *) ) + match cache with + | null -> cache <- new Dictionary<_, _>(11 (* sz: int *) ) | _ -> () - !cache + cache match cache.TryGetValue idx with | true, v -> - incr count + count <- count + 1 v | _ -> let res = f idx @@ -804,12 +804,12 @@ let seekReadIndexedRows (numRows, rowReader, keyFunc, keyComparer, binaryChop, r res else - let res = ref [] + let mutable res = [] for i = 1 to numRows do let rowinfo = rowReader i if keyComparer (keyFunc rowinfo) = 0 then - res := rowConverter rowinfo :: !res - List.rev !res + res <- rowConverter rowinfo :: res + List.rev res let seekReadOptionalIndexedRow info = @@ -1352,26 +1352,26 @@ let getDataEndPointsDelayed (pectxt: PEReader) ctxtH = let (ctxt: ILMetadataReader) = getHole ctxtH let mdv = ctxt.mdfile.GetView() let dataStartPoints = - let res = ref [] + let mutable res = [] for i = 1 to ctxt.getNumRows TableNames.FieldRVA do let rva, _fidx = seekReadFieldRVARow ctxt mdv i - res := ("field", rva) :: !res + res <- ("field", rva) :: res for i = 1 to ctxt.getNumRows TableNames.ManifestResource do let (offset, _, _, TaggedIndex(_tag, idx)) = seekReadManifestResourceRow ctxt mdv i if idx = 0 then let rva = pectxt.resourcesAddr + offset - res := ("manifest resource", rva) :: !res - !res + res <- ("manifest resource", rva) :: res + res if isNil dataStartPoints then [] else let methodRVAs = - let res = ref [] + let mutable res = [] for i = 1 to ctxt.getNumRows 
TableNames.Method do let (rva, _, _, nameIdx, _, _) = seekReadMethodRow ctxt mdv i if rva <> 0 then let nm = readStringHeap ctxt nameIdx - res := (nm, rva) :: !res - !res + res <- (nm, rva) :: res + res ([ pectxt.textSegmentPhysicalLoc + pectxt.textSegmentPhysicalSize pectxt.dataSegmentPhysicalLoc + pectxt.dataSegmentPhysicalSize ] @ @@ -2196,20 +2196,20 @@ and seekReadMethod (ctxt: ILMetadataReader) mdv numtypars (idx: int) = and seekReadParams (ctxt: ILMetadataReader) mdv (retty, argtys) pidx1 pidx2 = - let retRes = ref (mkILReturn retty) + let mutable retRes = mkILReturn retty let paramsRes = argtys |> List.toArray |> Array.map mkILParamAnon for i = pidx1 to pidx2 - 1 do - seekReadParamExtras ctxt mdv (retRes, paramsRes) i - !retRes, List.ofArray paramsRes + seekReadParamExtras ctxt mdv (&retRes, paramsRes) i + retRes, List.ofArray paramsRes -and seekReadParamExtras (ctxt: ILMetadataReader) mdv (retRes, paramsRes) (idx: int) = +and seekReadParamExtras (ctxt: ILMetadataReader) mdv (retRes: byref, paramsRes) (idx: int) = let (flags, seq, nameIdx) = seekReadParamRow ctxt mdv idx let inOutMasked = (flags &&& 0x00FF) let hasMarshal = (flags &&& 0x2000) <> 0x0 let hasDefault = (flags &&& 0x1000) <> 0x0 let fmReader idx = seekReadIndexedRow (ctxt.getNumRows TableNames.FieldMarshal, seekReadFieldMarshalRow ctxt mdv, fst, hfmCompare idx, isSorted ctxt TableNames.FieldMarshal, (snd >> readBlobHeapAsNativeType ctxt)) if seq = 0 then - retRes := { !retRes with + retRes <- { retRes with Marshal=(if hasMarshal then Some (fmReader (TaggedIndex(hfm_ParamDef, idx))) else None) CustomAttrsStored = ctxt.customAttrsReader_ParamDef MetadataIndex = idx} @@ -2481,37 +2481,37 @@ and seekReadTopCode (ctxt: ILMetadataReader) pev mdv numtypars (sz: int) start s ilOffsetsOfLabels.[lab] <- ilOffset let ibuf = new ResizeArray<_>(sz/2) - let curr = ref 0 + let mutable curr = 0 let prefixes = { al=Aligned; tl= Normalcall; vol= Nonvolatile;ro=NormalAddress;constrained=None } - let lastb = ref 0x0 - let lastb2 = ref 0x0 - let b = ref 0x0 + let mutable lastb = 0x0 + let mutable lastb2 = 0x0 + let mutable b = 0x0 let get () = - lastb := seekReadByteAsInt32 pev (start + (!curr)) - incr curr - b := - if !lastb = 0xfe && !curr < sz then - lastb2 := seekReadByteAsInt32 pev (start + (!curr)) - incr curr - !lastb2 + lastb <- seekReadByteAsInt32 pev (start + curr) + curr <- curr + 1 + b <- + if lastb = 0xfe && curr < sz then + lastb2 <- seekReadByteAsInt32 pev (start + curr) + curr <- curr + 1 + lastb2 else - !lastb + lastb - let seqPointsRemaining = ref seqpoints + let mutable seqPointsRemaining = seqpoints - while !curr < sz do + while curr < sz do // registering "+string !curr+" as start of an instruction") - markAsInstructionStart !curr ibuf.Count + markAsInstructionStart curr ibuf.Count // Insert any sequence points into the instruction sequence while - (match !seqPointsRemaining with - | (i, _tag) :: _rest when i <= !curr -> true + (match seqPointsRemaining with + | (i, _tag) :: _rest when i <= curr -> true | _ -> false) do // Emitting one sequence point - let (_, tag) = List.head !seqPointsRemaining - seqPointsRemaining := List.tail !seqPointsRemaining + let (_, tag) = List.head seqPointsRemaining + seqPointsRemaining <- List.tail seqPointsRemaining ibuf.Add (I_seqpoint tag) // Read the prefixes. 
Leave lastb and lastb2 holding the instruction byte(s) @@ -2522,27 +2522,27 @@ and seekReadTopCode (ctxt: ILMetadataReader) pev mdv numtypars (sz: int) start s prefixes.ro<-NormalAddress prefixes.constrained<-None get () - while !curr < sz && - !lastb = 0xfe && - (!b = (i_constrained &&& 0xff) || - !b = (i_readonly &&& 0xff) || - !b = (i_unaligned &&& 0xff) || - !b = (i_volatile &&& 0xff) || - !b = (i_tail &&& 0xff)) do + while curr < sz && + lastb = 0xfe && + (b = (i_constrained &&& 0xff) || + b = (i_readonly &&& 0xff) || + b = (i_unaligned &&& 0xff) || + b = (i_volatile &&& 0xff) || + b = (i_tail &&& 0xff)) do begin - if !b = (i_unaligned &&& 0xff) then - let unal = seekReadByteAsInt32 pev (start + (!curr)) - incr curr + if b = (i_unaligned &&& 0xff) then + let unal = seekReadByteAsInt32 pev (start + curr) + curr <- curr + 1 prefixes.al <- if unal = 0x1 then Unaligned1 elif unal = 0x2 then Unaligned2 elif unal = 0x4 then Unaligned4 else (dprintn "bad alignment for unaligned"; Aligned) - elif !b = (i_volatile &&& 0xff) then prefixes.vol <- Volatile - elif !b = (i_readonly &&& 0xff) then prefixes.ro <- ReadonlyAddress - elif !b = (i_constrained &&& 0xff) then - let uncoded = seekReadUncodedToken pev (start + (!curr)) - curr := !curr + 4 + elif b = (i_volatile &&& 0xff) then prefixes.vol <- Volatile + elif b = (i_readonly &&& 0xff) then prefixes.ro <- ReadonlyAddress + elif b = (i_constrained &&& 0xff) then + let uncoded = seekReadUncodedToken pev (start + curr) + curr <- curr + 4 let ty = seekReadTypeDefOrRef ctxt numtypars AsObject [] (uncodedTokenToTypeDefOrRefOrSpec uncoded) prefixes.constrained <- Some ty else prefixes.tl <- Tailcall @@ -2552,45 +2552,45 @@ and seekReadTopCode (ctxt: ILMetadataReader) pev mdv numtypars (sz: int) start s // data for instruction begins at "+string !curr // Read and decode the instruction - if (!curr <= sz) then + if (curr <= sz) then let idecoder = - if !lastb = 0xfe then getTwoByteInstr ( !lastb2) - else getOneByteInstr ( !lastb) + if lastb = 0xfe then getTwoByteInstr lastb2 + else getOneByteInstr lastb let instr = match idecoder with | I_u16_u8_instr f -> - let x = seekReadByte pev (start + (!curr)) |> uint16 - curr := !curr + 1 + let x = seekReadByte pev (start + curr) |> uint16 + curr <- curr + 1 f prefixes x | I_u16_u16_instr f -> - let x = seekReadUInt16 pev (start + (!curr)) - curr := !curr + 2 + let x = seekReadUInt16 pev (start + curr) + curr <- curr + 2 f prefixes x | I_none_instr f -> f prefixes | I_i64_instr f -> - let x = seekReadInt64 pev (start + (!curr)) - curr := !curr + 8 + let x = seekReadInt64 pev (start + curr) + curr <- curr + 8 f prefixes x | I_i32_i8_instr f -> - let x = seekReadSByte pev (start + (!curr)) |> int32 - curr := !curr + 1 + let x = seekReadSByte pev (start + curr) |> int32 + curr <- curr + 1 f prefixes x | I_i32_i32_instr f -> - let x = seekReadInt32 pev (start + (!curr)) - curr := !curr + 4 + let x = seekReadInt32 pev (start + curr) + curr <- curr + 4 f prefixes x | I_r4_instr f -> - let x = seekReadSingle pev (start + (!curr)) - curr := !curr + 4 + let x = seekReadSingle pev (start + curr) + curr <- curr + 4 f prefixes x | I_r8_instr f -> - let x = seekReadDouble pev (start + (!curr)) - curr := !curr + 8 + let x = seekReadDouble pev (start + curr) + curr <- curr + 8 f prefixes x | I_field_instr f -> - let (tab, tok) = seekReadUncodedToken pev (start + (!curr)) - curr := !curr + 4 + let (tab, tok) = seekReadUncodedToken pev (start + curr) + curr <- curr + 4 let fspec = if tab = TableNames.Field then 
seekReadFieldDefAsFieldSpec ctxt tok @@ -2601,8 +2601,8 @@ and seekReadTopCode (ctxt: ILMetadataReader) pev mdv numtypars (sz: int) start s | I_method_instr f -> // method instruction, curr = "+string !curr - let (tab, idx) = seekReadUncodedToken pev (start + (!curr)) - curr := !curr + 4 + let (tab, idx) = seekReadUncodedToken pev (start + curr) + curr <- curr + 4 let (VarArgMethodData(enclTy, cc, nm, argtys, varargs, retty, minst)) = if tab = TableNames.Method then seekReadMethodDefOrRef ctxt numtypars (TaggedIndex(mdor_MethodDef, idx)) @@ -2623,42 +2623,42 @@ and seekReadTopCode (ctxt: ILMetadataReader) pev mdv numtypars (sz: int) start s let mspec = mkILMethSpecInTy (enclTy, cc, nm, argtys, retty, minst) f prefixes (mspec, varargs) | I_type_instr f -> - let uncoded = seekReadUncodedToken pev (start + (!curr)) - curr := !curr + 4 + let uncoded = seekReadUncodedToken pev (start + curr) + curr <- curr + 4 let ty = seekReadTypeDefOrRef ctxt numtypars AsObject [] (uncodedTokenToTypeDefOrRefOrSpec uncoded) f prefixes ty | I_string_instr f -> - let (tab, idx) = seekReadUncodedToken pev (start + (!curr)) - curr := !curr + 4 + let (tab, idx) = seekReadUncodedToken pev (start + curr) + curr <- curr + 4 if tab <> TableNames.UserStrings then dprintn "warning: bad table in user string for ldstr" f prefixes (readUserStringHeap ctxt idx) | I_conditional_i32_instr f -> - let offsDest = (seekReadInt32 pev (start + (!curr))) - curr := !curr + 4 - let dest = !curr + offsDest + let offsDest = (seekReadInt32 pev (start + curr)) + curr <- curr + 4 + let dest = curr + offsDest f prefixes (rawToLabel dest) | I_conditional_i8_instr f -> - let offsDest = int (seekReadSByte pev (start + (!curr))) - curr := !curr + 1 - let dest = !curr + offsDest + let offsDest = int (seekReadSByte pev (start + curr)) + curr <- curr + 1 + let dest = curr + offsDest f prefixes (rawToLabel dest) | I_unconditional_i32_instr f -> - let offsDest = (seekReadInt32 pev (start + (!curr))) - curr := !curr + 4 - let dest = !curr + offsDest + let offsDest = (seekReadInt32 pev (start + curr)) + curr <- curr + 4 + let dest = curr + offsDest f prefixes (rawToLabel dest) | I_unconditional_i8_instr f -> - let offsDest = int (seekReadSByte pev (start + (!curr))) - curr := !curr + 1 - let dest = !curr + offsDest + let offsDest = int (seekReadSByte pev (start + curr)) + curr <- curr + 1 + let dest = curr + offsDest f prefixes (rawToLabel dest) | I_invalid_instr -> - dprintn ("invalid instruction: "+string !lastb+ (if !lastb = 0xfe then ", "+string !lastb2 else "")) + dprintn ("invalid instruction: "+string lastb + (if lastb = 0xfe then ", "+ string lastb2 else "")) I_ret | I_tok_instr f -> - let (tab, idx) = seekReadUncodedToken pev (start + (!curr)) - curr := !curr + 4 + let (tab, idx) = seekReadUncodedToken pev (start + curr) + curr <- curr + 4 (* REVIEW: this incorrectly labels all MemberRef tokens as ILMethod's: we should go look at the MemberRef sig to determine if it is a field or method *) let token_info = if tab = TableNames.Method || tab = TableNames.MemberRef (* REVIEW: generics or tab = TableNames.MethodSpec *) then @@ -2671,26 +2671,26 @@ and seekReadTopCode (ctxt: ILMetadataReader) pev mdv numtypars (sz: int) start s else failwith "bad token for ldtoken" f prefixes token_info | I_sig_instr f -> - let (tab, idx) = seekReadUncodedToken pev (start + (!curr)) - curr := !curr + 4 + let (tab, idx) = seekReadUncodedToken pev (start + curr) + curr <- curr + 4 if tab <> TableNames.StandAloneSig then dprintn "strange table for callsig token" let 
generic, _genarity, cc, retty, argtys, varargs = readBlobHeapAsMethodSig ctxt numtypars (seekReadStandAloneSigRow ctxt mdv idx) if generic then failwith "bad image: a generic method signature is begin used at a calli instruction" f prefixes (mkILCallSig (cc, argtys, retty), varargs) | I_switch_instr f -> - let n = (seekReadInt32 pev (start + (!curr))) - curr := !curr + 4 + let n = (seekReadInt32 pev (start + curr)) + curr <- curr + 4 let offsets = List.init n (fun _ -> - let i = (seekReadInt32 pev (start + (!curr))) - curr := !curr + 4 + let i = (seekReadInt32 pev (start + curr)) + curr <- curr + 4 i) - let dests = List.map (fun offs -> rawToLabel (!curr + offs)) offsets + let dests = List.map (fun offs -> rawToLabel (curr + offs)) offsets f prefixes dests ibuf.Add instr done // Finished reading instructions - mark the end of the instruction stream in case the PDB information refers to it. - markAsInstructionStart !curr ibuf.Count + markAsInstructionStart curr ibuf.Count // Build the function that maps from raw labels (offsets into the bytecode stream) to indexes in the AbsIL instruction stream let lab2pc = ilOffsetsOfLabels @@ -2823,11 +2823,11 @@ and seekReadMethodRVA (pectxt: PEReader) (ctxt: ILMetadataReader) (idx, nm, _int // Read all the sections that follow the method body. // These contain the exception clauses. - let nextSectionBase = ref (align 4 (codeBase + codeSize)) - let moreSections = ref hasMoreSections - let seh = ref [] - while !moreSections do - let sectionBase = !nextSectionBase + let mutable nextSectionBase = align 4 (codeBase + codeSize) + let mutable moreSections = hasMoreSections + let mutable seh = [] + while moreSections do + let sectionBase = nextSectionBase let sectionFlag = seekReadByte pev sectionBase // fat format for "+nm+", sectionFlag = " + string sectionFlag) let sectionSize, clauses = @@ -2907,16 +2907,16 @@ and seekReadMethodRVA (pectxt: PEReader) (ctxt: ILMetadataReader) (idx, nm, _int | _ -> sehMap.[key] <- [clause]) clauses ([], sehMap) ||> Seq.fold (fun acc (KeyValue(key, bs)) -> [ for b in bs -> {Range=key; Clause=b}: ILExceptionSpec ] @ acc) - seh := sehClauses - moreSections := (sectionFlag &&& e_CorILMethod_Sect_MoreSects) <> 0x0uy - nextSectionBase := sectionBase + sectionSize + seh <- sehClauses + moreSections <- (sectionFlag &&& e_CorILMethod_Sect_MoreSects) <> 0x0uy + nextSectionBase <- sectionBase + sectionSize done (* while *) (* Convert the linear code format to the nested code format *) if logging then dprintn ("doing localPdbInfos2") let localPdbInfos2 = List.map (fun f -> f raw2nextLab) localPdbInfos if logging then dprintn ("done localPdbInfos2, checking code...") - let code = buildILCode nm lab2pc instrs !seh localPdbInfos2 + let code = buildILCode nm lab2pc instrs seh localPdbInfos2 if logging then dprintn ("done checking code.") MethodBody.IL { IsZeroInit=initlocals @@ -3141,19 +3141,19 @@ let openMetadataReader (fileName, mdfile: BinaryFile, metadataPhysLoc, peinfo, p else let offset = seekReadInt32 mdv (pos + 0) let length = seekReadInt32 mdv (pos + 4) - let res = ref true - let fin = ref false - let n = ref 0 + let mutable res = true + let mutable fin = false + let mutable n = 0 // read and compare the stream name byte by byte - while (not !fin) do - let c= seekReadByteAsInt32 mdv (pos + 8 + (!n)) + while (not fin) do + let c= seekReadByteAsInt32 mdv (pos + 8 + n) if c = 0 then - fin := true - elif !n >= Array.length name || c <> name.[!n] then - res := false - incr n - if !res then Some(offset + metadataPhysLoc, length) - else
look (i+1) (align 0x04 (pos + 8 + (!n))) + fin <- true + elif n >= Array.length name || c <> name.[n] then + res <- false + n <- n + 1 + if res then Some(offset + metadataPhysLoc, length) + else look (i+1) (align 0x04 (pos + 8 + n)) look 0 streamHeadersStart let findStream name = @@ -3248,15 +3248,15 @@ let openMetadataReader (fileName, mdfile: BinaryFile, metadataPhysLoc, peinfo, p let valid = seekReadInt64 mdv (tablesStreamPhysLoc + 8) let sorted = seekReadInt64 mdv (tablesStreamPhysLoc + 16) let tablesPresent, tableRowCount, startOfTables = - let present = ref [] + let mutable present = [] let numRows = Array.create 64 0 - let prevNumRowIdx = ref (tablesStreamPhysLoc + 24) + let mutable prevNumRowIdx = tablesStreamPhysLoc + 24 for i = 0 to 63 do if (valid &&& (int64 1 <<< i)) <> int64 0 then - present := i :: !present - numRows.[i] <- (seekReadInt32 mdv !prevNumRowIdx) - prevNumRowIdx := !prevNumRowIdx + 4 - List.rev !present, numRows, !prevNumRowIdx + present <- i :: present + numRows.[i] <- (seekReadInt32 mdv prevNumRowIdx) + prevNumRowIdx <- prevNumRowIdx + 4 + List.rev present, numRows, prevNumRowIdx let getNumRows (tab: TableName) = tableRowCount.[tab.Index] let numTables = tablesPresent.Length diff --git a/src/fsharp/CompileOps.fs b/src/fsharp/CompileOps.fs index 45d93c5cec3..067d3353a64 100644 --- a/src/fsharp/CompileOps.fs +++ b/src/fsharp/CompileOps.fs @@ -2479,7 +2479,7 @@ type AssemblyResolution = resolvedPath: string prepareToolTip: unit -> string sysdir: bool - ilAssemblyRef: ILAssemblyRef option ref + mutable ilAssemblyRef: ILAssemblyRef option } override this.ToString() = sprintf "%s%s" (if this.sysdir then "[sys]" else "") this.resolvedPath @@ -2494,7 +2494,7 @@ type AssemblyResolution = // member this.GetILAssemblyRef(ctok, reduceMemoryUsage, tryGetMetadataSnapshot) = cancellable { - match !this.ilAssemblyRef with + match this.ilAssemblyRef with | Some assemblyRef -> return assemblyRef | None -> let!
assemblyRefOpt = @@ -2522,7 +2522,7 @@ type AssemblyResolution = tryGetMetadataSnapshot = tryGetMetadataSnapshot } use reader = OpenILModuleReader this.resolvedPath readerSettings mkRefToILAssembly reader.ILModuleDef.ManifestOfAssembly - this.ilAssemblyRef := Some assemblyRef + this.ilAssemblyRef <- Some assemblyRef return assemblyRef } @@ -2892,7 +2892,7 @@ type TcConfig private (data: TcConfigBuilder, validate: bool) = resolvedPath = resolved prepareToolTip = (fun () -> resolved) sysdir = sysdir - ilAssemblyRef = ref None } + ilAssemblyRef = None } | None -> if String.Compare(ext, ".dll", StringComparison.OrdinalIgnoreCase)=0 @@ -2927,7 +2927,7 @@ type TcConfig private (data: TcConfigBuilder, validate: bool) = let line(append: string) = append.Trim([|' '|])+"\n" line resolved + line fusionName) sysdir = sysdir - ilAssemblyRef = ref None } + ilAssemblyRef = None } | None -> None else None @@ -3057,7 +3057,7 @@ type TcConfig private (data: TcConfigBuilder, validate: bool) = resolvedPath=canonicalItemSpec prepareToolTip = (fun () -> resolvedFile.prepareToolTip (originalReference.Text, canonicalItemSpec)) sysdir= tcConfig.IsSystemAssembly canonicalItemSpec - ilAssemblyRef = ref None }) + ilAssemblyRef = None }) (maxIndexOfReference, assemblyResolutions)) // When calculating the resulting resolutions, we're going to use the index of the reference @@ -3395,7 +3395,7 @@ let ParseOneInputLexbuf (tcConfig: TcConfig, lexResourceManager, conditionalComp try let skip = true in (* don't report whitespace from lexer *) let lightSyntaxStatus = LightSyntaxStatus (tcConfig.ComputeLightSyntaxInitialStatus filename, true) - let lexargs = mkLexargs (filename, conditionalCompilationDefines@tcConfig.conditionalCompilationDefines, lightSyntaxStatus, lexResourceManager, ref [], errorLogger, tcConfig.pathMap) + let lexargs = mkLexargs (filename, conditionalCompilationDefines@tcConfig.conditionalCompilationDefines, lightSyntaxStatus, lexResourceManager, [], errorLogger, tcConfig.pathMap) let shortFilename = SanitizeFileName filename tcConfig.implicitIncludeDir let input = Lexhelp.usingLexbufForParsing (lexbuf, filename) (fun lexbuf -> @@ -3525,24 +3525,24 @@ type TcAssemblyResolutions(tcConfig: TcConfig, results: AssemblyResolution list, let frameworkDLLs, nonFrameworkReferences = resolutions.GetAssemblyResolutions() |> List.partition (fun r -> r.sysdir) let unresolved = resolutions.GetUnresolvedReferences() #if DEBUG - let itFailed = ref false + let mutable itFailed = false let addedText = "\nIf you want to debug this right now, attach a debugger, and put a breakpoint in 'CompileOps.fs' near the text '!itFailed', and you can re-step through the assembly resolution logic." 
unresolved |> List.iter (fun (UnresolvedAssemblyReference(referenceText, _ranges)) -> if referenceText.Contains("mscorlib") then System.Diagnostics.Debug.Assert(false, sprintf "whoops, did not resolve mscorlib: '%s'%s" referenceText addedText) - itFailed := true) + itFailed <- true) frameworkDLLs |> List.iter (fun x -> if not(FileSystem.IsPathRootedShim(x.resolvedPath)) then System.Diagnostics.Debug.Assert(false, sprintf "frameworkDLL should be absolute path: '%s'%s" x.resolvedPath addedText) - itFailed := true) + itFailed <- true) nonFrameworkReferences |> List.iter (fun x -> if not(FileSystem.IsPathRootedShim(x.resolvedPath)) then System.Diagnostics.Debug.Assert(false, sprintf "nonFrameworkReference should be absolute path: '%s'%s" x.resolvedPath addedText) - itFailed := true) - if !itFailed then + itFailed <- true) + if itFailed then // idea is, put a breakpoint here and then step through let assemblyList = TcAssemblyResolutions.GetAllDllReferences tcConfig let resolutions = TcAssemblyResolutions.ResolveAssemblyReferences (ctok, tcConfig, assemblyList, []) @@ -4207,9 +4207,9 @@ and [] TcImports(tcConfigP: TcConfigProvider, initialResolutions: TcAsse let systemRuntimeContainsType = // NOTE: do not touch this, edit: but we did, we had no choice - TPs cannot hold a strong reference on TcImports "ever". let tcImports = tcImportsWeak - let systemRuntimeContainsTypeRef = ref (fun typeName -> tcImports.SystemRuntimeContainsType typeName) - tcImportsStrong.AttachDisposeTypeProviderAction(fun () -> systemRuntimeContainsTypeRef := (fun _ -> raise (System.ObjectDisposedException("The type provider has been disposed")))) - fun arg -> systemRuntimeContainsTypeRef.Value arg + let mutable systemRuntimeContainsTypeRef = fun typeName -> tcImports.SystemRuntimeContainsType typeName + tcImportsStrong.AttachDisposeTypeProviderAction(fun () -> systemRuntimeContainsTypeRef <- fun _ -> raise (System.ObjectDisposedException("The type provider has been disposed"))) + fun arg -> systemRuntimeContainsTypeRef arg let providers = [ for designTimeAssemblyName in designTimeAssemblyNames do @@ -4676,7 +4676,7 @@ and [] TcImports(tcConfigP: TcConfigProvider, initialResolutions: TcAsse error(InternalError("BuildFrameworkTcImports: no successful import of "+coreLibraryResolution.resolvedPath, coreLibraryResolution.originalReference.Range)) | None -> error(InternalError(sprintf "BuildFrameworkTcImports: no resolution of '%s'" coreLibraryReference.Text, rangeStartup)) - IlxSettings.ilxFsharpCoreLibAssemRef := + IlxSettings.ilxFsharpCoreLibAssemRef <- (let scoref = fslibCcuInfo.ILScopeRef match scoref with | ILScopeRef.Assembly aref -> Some aref @@ -4691,11 +4691,11 @@ and [] TcImports(tcConfigP: TcConfigProvider, initialResolutions: TcAsse #if DEBUG // the global_g reference cell is used only for debug printing - global_g := Some tcGlobals + global_g <- Some tcGlobals #endif // do this prior to parsing, since parsing IL assembly code may refer to mscorlib #if !NO_INLINE_IL_PARSER - FSharp.Compiler.AbstractIL.Internal.AsciiConstants.parseILGlobals := tcGlobals.ilg + FSharp.Compiler.AbstractIL.Internal.AsciiConstants.parseILGlobals <- tcGlobals.ilg #endif frameworkTcImports.SetTcGlobals tcGlobals return tcGlobals, frameworkTcImports @@ -5035,8 +5035,8 @@ module private ScriptPreprocessClosure = (tcConfig: TcConfig, inp: ParsedInput, pathOfMetaCommandSource) = let tcConfigB = tcConfig.CloneOfOriginalBuilder - let nowarns = ref [] - let getWarningNumber = fun () (m, s) -> nowarns := (s, m) :: !nowarns + let mutable nowarns = 
[] + let getWarningNumber = fun () (m, s) -> nowarns <- (s, m) :: nowarns let addReferencedAssemblyByPath = fun () (m, s) -> tcConfigB.AddReferencedAssemblyByPath(m, s) let addLoadedSource = fun () (m, s) -> tcConfigB.AddLoadedSource(m, s, pathOfMetaCommandSource) try @@ -5056,7 +5056,7 @@ module private ScriptPreprocessClosure = (closureSources, tcConfig: TcConfig, codeContext, lexResourceManager: Lexhelp.LexResourceManager) = - let tcConfig = ref tcConfig + let mutable tcConfig = tcConfig let observedSources = Observed() let rec loop (ClosureSource(filename, m, sourceText, parseRequired)) = @@ -5067,7 +5067,7 @@ module private ScriptPreprocessClosure = let parseResult, parseDiagnostics = let errorLogger = CapturingErrorLogger("FindClosureParse") use _unwindEL = PushErrorLoggerPhaseUntilUnwind (fun _ -> errorLogger) - let result = ParseScriptText (filename, sourceText, !tcConfig, codeContext, lexResourceManager, errorLogger) + let result = ParseScriptText (filename, sourceText, tcConfig, codeContext, lexResourceManager, errorLogger) result, errorLogger.Diagnostics match parseResult with @@ -5075,12 +5075,12 @@ module private ScriptPreprocessClosure = let errorLogger = CapturingErrorLogger("FindClosureMetaCommands") use _unwindEL = PushErrorLoggerPhaseUntilUnwind (fun _ -> errorLogger) let pathOfMetaCommandSource = Path.GetDirectoryName filename - let preSources = (!tcConfig).GetAvailableLoadedSources() + let preSources = tcConfig.GetAvailableLoadedSources() - let tcConfigResult, noWarns = ApplyMetaCommandsFromInputToTcConfigAndGatherNoWarn (!tcConfig, parsedScriptAst, pathOfMetaCommandSource) - tcConfig := tcConfigResult // We accumulate the tcConfig in order to collect assembly references + let tcConfigResult, noWarns = ApplyMetaCommandsFromInputToTcConfigAndGatherNoWarn (tcConfig, parsedScriptAst, pathOfMetaCommandSource) + tcConfig <- tcConfigResult // We accumulate the tcConfig in order to collect assembly references - let postSources = (!tcConfig).GetAvailableLoadedSources() + let postSources = tcConfig.GetAvailableLoadedSources() let sources = if preSources.Length < postSources.Length then postSources.[preSources.Length..] else [] //for (_, subFile) in sources do @@ -5094,7 +5094,7 @@ module private ScriptPreprocessClosure = yield ClosureFile(subFile, m, None, [], [], []) //printfn "yielding source %s" filename - yield ClosureFile(filename, m, Some parsedScriptAst, parseDiagnostics, errorLogger.Diagnostics, !noWarns) + yield ClosureFile(filename, m, Some parsedScriptAst, parseDiagnostics, errorLogger.Diagnostics, noWarns) | None -> //printfn "yielding source %s (failed parse)" filename @@ -5104,7 +5104,7 @@ module private ScriptPreprocessClosure = //printfn "yielding non-script source %s" filename yield ClosureFile(filename, m, None, [], [], []) ] - closureSources |> List.collect loop, !tcConfig + closureSources |> List.collect loop, tcConfig /// Reduce the full directive closure into LoadClosure let GetLoadClosure(ctok, rootFilename, closureFiles, tcConfig: TcConfig, codeContext) = diff --git a/src/fsharp/CompileOps.fsi b/src/fsharp/CompileOps.fsi index 230bd508de5..a0b031092ac 100644 --- a/src/fsharp/CompileOps.fsi +++ b/src/fsharp/CompileOps.fsi @@ -204,7 +204,7 @@ type AssemblyResolution = /// Whether or not this is an installed system assembly (for example, System.dll) sysdir: bool // Lazily populated ilAssemblyRef for this reference. 
- ilAssemblyRef: ILAssemblyRef option ref } + mutable ilAssemblyRef: ILAssemblyRef option } type UnresolvedAssemblyReference = UnresolvedAssemblyReference of string * AssemblyReference list diff --git a/src/fsharp/CompileOptions.fs b/src/fsharp/CompileOptions.fs index 29e854a83f1..441df5f3468 100644 --- a/src/fsharp/CompileOptions.fs +++ b/src/fsharp/CompileOptions.fs @@ -1015,7 +1015,7 @@ let testFlag tcConfigB = match s with | "StackSpan" -> tcConfigB.internalTestSpanStackReferring <- true | "ErrorRanges" -> tcConfigB.errorStyle <- ErrorStyle.TestErrors - | "Tracking" -> Lib.tracking := true (* general purpose on/off diagnostics flag *) + | "Tracking" -> Lib.tracking <- true (* general purpose on/off diagnostics flag *) | "NoNeedToTailcall" -> tcConfigB.optSettings <- { tcConfigB.optSettings with reportNoNeedToTailcall = true } | "FunctionSizes" -> tcConfigB.optSettings <- { tcConfigB.optSettings with reportFunctionSizes = true } | "TotalSizes" -> tcConfigB.optSettings <- { tcConfigB.optSettings with reportTotalSizes = true } @@ -1242,7 +1242,7 @@ let compilingFsLibFlag (tcConfigB: TcConfigBuilder) = tcConfigB.compilingFslib <- true tcConfigB.TurnWarningOff(rangeStartup, "42") ErrorLogger.reportLibraryOnlyFeatures <- false - IlxSettings.ilxCompilingFSharpCoreLib := true), + IlxSettings.ilxCompilingFSharpCoreLib <- true), Some(InternalCommandLineOption("--compiling-fslib", rangeCmdArgs)), None) let compilingFsLib20Flag = @@ -1312,7 +1312,7 @@ let deprecatedFlagsFsc tcConfigB = CompilerOption ("progress", tagNone, - OptionUnit (fun () -> progress := true), + OptionUnit (fun () -> progress <- true), Some(DeprecatedCommandLineOptionNoDescription("--progress", rangeCmdArgs)), None) compilingFsLibFlag tcConfigB @@ -1588,14 +1588,13 @@ let ApplyCommandLineArgs(tcConfigB: TcConfigBuilder, sourceFiles: string list, c // PrintWholeAssemblyImplementation //---------------------------------------------------------------------------- -let showTermFileCount = ref 0 +let mutable showTermFileCount = 0 let PrintWholeAssemblyImplementation g (tcConfig:TcConfig) outfile header expr = if tcConfig.showTerms then if tcConfig.writeTermsToFiles then let filename = outfile + ".terms" - let n = !showTermFileCount - showTermFileCount := n+1 - use f = System.IO.File.CreateText (filename + "-" + string n + "-" + header) + use f = System.IO.File.CreateText (filename + "-" + string showTermFileCount + "-" + header) + showTermFileCount <- showTermFileCount + 1 Layout.outL f (Layout.squashTo 192 (DebugPrint.implFilesL g expr)) else dprintf "\n------------------\nshowTerm: %s:\n" header @@ -1606,11 +1605,11 @@ let PrintWholeAssemblyImplementation g (tcConfig:TcConfig) outfile header expr = // ReportTime //---------------------------------------------------------------------------- -let tPrev = ref None -let nPrev = ref None +let mutable tPrev = None +let mutable nPrev = None let ReportTime (tcConfig:TcConfig) descr = - match !nPrev with + match nPrev with | None -> () | Some prevDescr -> if tcConfig.pause then @@ -1651,7 +1650,7 @@ let ReportTime (tcConfig:TcConfig) descr = let ptime = System.Diagnostics.Process.GetCurrentProcess() let wsNow = ptime.WorkingSet64/1000000L - match !tPrev, !nPrev with + match tPrev, nPrev with | Some (timePrev, gcPrev:int []), Some prevDescr -> let spanGC = [| for i in 0 .. 
maxGen -> System.GC.CollectionCount i - gcPrev.[i] |] dprintf "TIME: %4.1f Delta: %4.1f Mem: %3d" @@ -1662,9 +1661,9 @@ let ReportTime (tcConfig:TcConfig) descr = prevDescr | _ -> () - tPrev := Some (timeNow, gcNow) + tPrev <- Some (timeNow, gcNow) - nPrev := Some descr + nPrev <- Some descr //---------------------------------------------------------------------------- // OPTIMIZATION - support - addDllToOptEnv diff --git a/src/fsharp/ConstraintSolver.fs b/src/fsharp/ConstraintSolver.fs index f5aade84008..e22437de141 100644 --- a/src/fsharp/ConstraintSolver.fs +++ b/src/fsharp/ConstraintSolver.fs @@ -1710,9 +1710,9 @@ and AddConstraint (csenv: ConstraintSolverEnv) ndeep m2 trace tp newConstraint // This works because the types on the r.h.s. of subtype // constraints are head-types and so any further inferences are equational. let collect ty = - let res = ref [] - IterateEntireHierarchyOfType (fun x -> res := x :: !res) g amap m AllowMultiIntfInstantiations.No ty - List.rev !res + let mutable res = [] + IterateEntireHierarchyOfType (fun x -> res <- x :: res) g amap m AllowMultiIntfInstantiations.No ty + List.rev res let parents1 = collect ty1 let parents2 = collect ty2 trackErrors { diff --git a/src/fsharp/ErrorLogger.fs b/src/fsharp/ErrorLogger.fs index c37a3395121..2f515d566a8 100755 --- a/src/fsharp/ErrorLogger.fs +++ b/src/fsharp/ErrorLogger.fs @@ -441,8 +441,8 @@ let PushThreadBuildPhaseUntilUnwind (phase:BuildPhase) = let PushErrorLoggerPhaseUntilUnwind(errorLoggerTransformer : ErrorLogger -> #ErrorLogger) = let oldErrorLogger = CompileThreadStatic.ErrorLogger let newErrorLogger = errorLoggerTransformer oldErrorLogger - let newInstalled = ref true - let newIsInstalled() = if !newInstalled then () else (assert false; (); (*failwith "error logger used after unwind"*)) // REVIEW: ok to throw? + let mutable newInstalled = true + let newIsInstalled() = if newInstalled then () else (assert false; (); (*failwith "error logger used after unwind"*)) // REVIEW: ok to throw? 
let chkErrorLogger = { new ErrorLogger("PushErrorLoggerPhaseUntilUnwind") with member __.DiagnosticSink(phasedError, isError) = newIsInstalled(); newErrorLogger.DiagnosticSink(phasedError, isError) member __.ErrorCount = newIsInstalled(); newErrorLogger.ErrorCount } @@ -452,7 +452,7 @@ let PushErrorLoggerPhaseUntilUnwind(errorLoggerTransformer : ErrorLogger -> #Err { new System.IDisposable with member __.Dispose() = CompileThreadStatic.ErrorLogger <- oldErrorLogger - newInstalled := false } + newInstalled <- false } let SetThreadBuildPhaseNoUnwind(phase:BuildPhase) = CompileThreadStatic.BuildPhase <- phase let SetThreadErrorLoggerNoUnwind errorLogger = CompileThreadStatic.ErrorLogger <- errorLogger diff --git a/src/fsharp/FSharp.Build/FSharp.Build.fsproj b/src/fsharp/FSharp.Build/FSharp.Build.fsproj index 76b9c7043e9..f427453df99 100644 --- a/src/fsharp/FSharp.Build/FSharp.Build.fsproj +++ b/src/fsharp/FSharp.Build/FSharp.Build.fsproj @@ -26,6 +26,7 @@ + diff --git a/src/fsharp/FSharp.Build/FSharpEmbedResourceText.fs b/src/fsharp/FSharp.Build/FSharpEmbedResourceText.fs index 63c9bf12457..3428d60ad8f 100644 --- a/src/fsharp/FSharp.Build/FSharpEmbedResourceText.fs +++ b/src/fsharp/FSharp.Build/FSharpEmbedResourceText.fs @@ -394,18 +394,18 @@ open Printf stringInfos |> Seq.iter (fun (lineNum, (optErrNum,ident), str, holes, netFormatString) -> let formalArgs = new System.Text.StringBuilder() let actualArgs = new System.Text.StringBuilder() - let firstTime = ref true - let n = ref 0 + let mutable firstTime = true + let mutable n = 0 formalArgs.Append "(" |> ignore for hole in holes do - if !firstTime then - firstTime := false + if firstTime then + firstTime <- false else formalArgs.Append ", " |> ignore actualArgs.Append " " |> ignore - formalArgs.Append(sprintf "a%d : %s" !n hole) |> ignore - actualArgs.Append(sprintf "a%d" !n) |> ignore - n := !n + 1 + formalArgs.Append(sprintf "a%d : %s" n hole) |> ignore + actualArgs.Append(sprintf "a%d" n) |> ignore + n <- n + 1 formalArgs.Append ")" |> ignore fprintfn out " /// %s" str fprintfn out " /// (Originally from %s:%d)" filename (lineNum+1) diff --git a/src/fsharp/FSharp.Build/Microsoft.FSharp.Targets b/src/fsharp/FSharp.Build/Microsoft.FSharp.Targets index 8421c8ea504..0b51d7204f6 100644 --- a/src/fsharp/FSharp.Build/Microsoft.FSharp.Targets +++ b/src/fsharp/FSharp.Build/Microsoft.FSharp.Targets @@ -29,6 +29,7 @@ this file. + true diff --git a/src/fsharp/FSharp.Build/SubstituteText.fs b/src/fsharp/FSharp.Build/SubstituteText.fs new file mode 100644 index 00000000000..16b8eab5245 --- /dev/null +++ b/src/fsharp/FSharp.Build/SubstituteText.fs @@ -0,0 +1,92 @@ +// Copyright (c) Microsoft Corporation. All Rights Reserved. See License.txt in the project root for license information. 
+ +namespace FSharp.Build + +open System +open System.Collections +open System.IO +open Microsoft.Build.Framework +open Microsoft.Build.Utilities + +type SubstituteText () = + + let mutable _buildEngine : IBuildEngine = null + let mutable _hostObject : ITaskHost = null + + let mutable copiedFiles = new ResizeArray() + let mutable embeddedResources : ITaskItem[] = [||] + + [] + member this.EmbeddedResources + with get() = embeddedResources + and set(value) = embeddedResources <- value + + [] + member this.CopiedFiles + with get() = copiedFiles.ToArray() + + interface ITask with + member this.BuildEngine + with get() = _buildEngine + and set(value) = _buildEngine <- value + + member this.HostObject + with get() = _hostObject + and set(value) = _hostObject <- value + + member this.Execute() = + copiedFiles.Clear() + if not(isNull embeddedResources) then + for item in embeddedResources do + // Update ITaskItem metadata to point to new location + let sourcePath = item.GetMetadata("FullPath") + + let pattern1 = item.GetMetadata("Pattern1") + let pattern2 = item.GetMetadata("Pattern2") + + // Is there any replacement to do? + if not (String.IsNullOrWhiteSpace(pattern1) && String.IsNullOrWhiteSpace(pattern2)) then + if not(String.IsNullOrWhiteSpace(sourcePath)) then + try + let getTargetPathFrom key = + let md = item.GetMetadata(key) + let path = Path.GetDirectoryName(md) + let filename = Path.GetFileName(md) + let target = Path.Combine(path, @"..\resources", filename) + target + + // Copy from the location specified in Identity + let sourcePath=item.GetMetadata("Identity") + + // Copy to the location specified in TargetPath unless no TargetPath is provided, then use Identity + let targetPath= + let identityPath = getTargetPathFrom "Identity" + let intermediateTargetPath = item.GetMetadata("IntermediateTargetPath") + if not (String.IsNullOrWhiteSpace(intermediateTargetPath)) then + let filename = Path.GetFileName(identityPath) + let target = Path.Combine(intermediateTargetPath, filename) + target + else + identityPath + + item.ItemSpec <- targetPath + + // Transform file + let mutable contents = File.ReadAllText(sourcePath) + if not (String.IsNullOrWhiteSpace(pattern1)) then + let replacement = item.GetMetadata("Replacement1") + contents <- contents.Replace(pattern1, replacement) + if not (String.IsNullOrWhiteSpace(pattern2)) then + let replacement = item.GetMetadata("Replacement2") + contents <- contents.Replace(pattern2, replacement) + + let directory = Path.GetDirectoryName(targetPath) + if not(Directory.Exists(directory)) then + Directory.CreateDirectory(directory) |>ignore + + File.WriteAllText(targetPath, contents) + with + | _ -> () + + copiedFiles.Add(item) + true diff --git a/src/fsharp/FSharp.Compiler.Private.Scripting/FSharpScript.fs b/src/fsharp/FSharp.Compiler.Private.Scripting/FSharpScript.fs index 29cdf6d6d0a..7cb7327a273 100644 --- a/src/fsharp/FSharp.Compiler.Private.Scripting/FSharpScript.fs +++ b/src/fsharp/FSharp.Compiler.Private.Scripting/FSharpScript.fs @@ -83,3 +83,4 @@ type FSharpScript(?captureInput: bool, ?captureOutput: bool, ?additionalArgs: st stdin.Dispose() stdout.Dispose() stderr.Dispose() + (fsi :> IDisposable).Dispose() diff --git a/src/fsharp/FSharp.Core/async.fs b/src/fsharp/FSharp.Core/async.fs index b6ab88a16f6..4655b0d73f3 100644 --- a/src/fsharp/FSharp.Core/async.fs +++ b/src/fsharp/FSharp.Core/async.fs @@ -1647,19 +1647,19 @@ namespace Microsoft.FSharp.Control let resultCell = new ResultCell<_>() let! 
cancellationToken = cancellationTokenAsync let innerCTS = new CancellationTokenSource() // innerCTS does not require disposal - let ctsRef = ref innerCTS + let mutable ctsRef = innerCTS let reg = cancellationToken.Register( (fun _ -> - match !ctsRef with + match ctsRef with | null -> () | otherwise -> otherwise.Cancel()), null) do QueueAsync innerCTS.Token // since innerCTS is not ever Disposed, can call reg.Dispose() without a safety Latch - (fun res -> ctsRef := null; reg.Dispose(); resultCell.RegisterResult (Ok res, reuseThread=true)) - (fun edi -> ctsRef := null; reg.Dispose(); resultCell.RegisterResult (Error edi, reuseThread=true)) - (fun err -> ctsRef := null; reg.Dispose(); resultCell.RegisterResult (Canceled err, reuseThread=true)) + (fun res -> ctsRef <- null; reg.Dispose(); resultCell.RegisterResult (Ok res, reuseThread=true)) + (fun edi -> ctsRef <- null; reg.Dispose(); resultCell.RegisterResult (Error edi, reuseThread=true)) + (fun err -> ctsRef <- null; reg.Dispose(); resultCell.RegisterResult (Canceled err, reuseThread=true)) computation |> unfake @@ -1713,10 +1713,10 @@ namespace Microsoft.FSharp.Control [] // give the extension member a 'nice', unmangled compiled name, unique within this module member stream.AsyncRead count = async { let buffer = Array.zeroCreate count - let i = ref 0 - while !i < count do - let! n = stream.AsyncRead(buffer, !i, count - !i) - i := !i + n + let mutable i = 0 + while i < count do + let! n = stream.AsyncRead(buffer, i, count - i) + i <- i + n if n = 0 then raise(System.IO.EndOfStreamException(SR.GetString(SR.failedReadEnoughBytes))) return buffer } @@ -1746,16 +1746,16 @@ namespace Microsoft.FSharp.Control [] // give the extension member a 'nice', unmangled compiled name, unique within this module member req.AsyncGetResponse() : Async= - let canceled = ref false // WebException with Status = WebExceptionStatus.RequestCanceled can be raised in other situations except cancellation, use flag to filter out false positives + let mutable canceled = false // WebException with Status = WebExceptionStatus.RequestCanceled can be raised in other situations except cancellation, use flag to filter out false positives // Use CreateTryWithFilterAsync to allow propagation of exception without losing stack Async.FromBeginEnd(beginAction=req.BeginGetResponse, endAction = req.EndGetResponse, - cancelAction = fun() -> canceled := true; req.Abort()) + cancelAction = fun() -> canceled <- true; req.Abort()) |> CreateTryWithFilterAsync (fun exn -> match exn with | :? 
System.Net.WebException as webExn - when webExn.Status = System.Net.WebExceptionStatus.RequestCanceled && !canceled -> + when webExn.Status = System.Net.WebExceptionStatus.RequestCanceled && canceled -> Some (Async.BindResult(AsyncResult.Canceled (OperationCanceledException webExn.Message))) | _ -> diff --git a/src/fsharp/FSharp.Core/eventmodule.fs b/src/fsharp/FSharp.Core/eventmodule.fs index 1d615a307c3..fe907373f59 100644 --- a/src/fsharp/FSharp.Core/eventmodule.fs +++ b/src/fsharp/FSharp.Core/eventmodule.fs @@ -40,12 +40,12 @@ namespace Microsoft.FSharp.Control [] let scan collector state (sourceEvent: IEvent<'Delegate,'T>) = - let state = ref state + let mutable state = state let ev = new Event<_>() sourceEvent.Add(fun msg -> - let z = !state + let z = state let z = collector z msg - state := z; + state <- z; ev.Trigger(z)) ev.Publish @@ -55,12 +55,12 @@ namespace Microsoft.FSharp.Control [] let pairwise (sourceEvent : IEvent<'Delegate,'T>) : IEvent<'T * 'T> = let ev = new Event<'T * 'T>() - let lastArgs = ref None + let mutable lastArgs = None sourceEvent.Add(fun args2 -> - (match !lastArgs with + (match lastArgs with | None -> () | Some args1 -> ev.Trigger(args1,args2)) - lastArgs := Some args2) + lastArgs <- Some args2) ev.Publish diff --git a/src/fsharp/FSharp.Core/local.fs b/src/fsharp/FSharp.Core/local.fs index 0e97166352d..89db09ecdaa 100644 --- a/src/fsharp/FSharp.Core/local.fs +++ b/src/fsharp/FSharp.Core/local.fs @@ -1185,13 +1185,13 @@ module internal Array = let count = min count len let res = zeroCreateUnchecked count : 'T[][] let minChunkSize = len / count - let startIndex = ref 0 + let mutable startIndex = 0 for i = 0 to len % count - 1 do - res.[i] <- subUnchecked !startIndex (minChunkSize + 1) array - startIndex := !startIndex + minChunkSize + 1 + res.[i] <- subUnchecked startIndex (minChunkSize + 1) array + startIndex <- startIndex + minChunkSize + 1 for i = len % count to count - 1 do - res.[i] <- subUnchecked !startIndex minChunkSize array - startIndex := !startIndex + minChunkSize + res.[i] <- subUnchecked startIndex minChunkSize array + startIndex <- startIndex + minChunkSize res module internal Seq = diff --git a/src/fsharp/FSharp.Core/map.fs b/src/fsharp/FSharp.Core/map.fs index d0fccda1fdb..0c7554586f9 100644 --- a/src/fsharp/FSharp.Core/map.fs +++ b/src/fsharp/FSharp.Core/map.fs @@ -374,8 +374,8 @@ module MapTree = mkFromEnumerator comparer empty ie let copyToArray m (arr: _[]) i = - let j = ref i - m |> iter (fun x y -> arr.[!j] <- KeyValuePair(x, y); j := !j + 1) + let mutable j = i + m |> iter (fun x y -> arr.[j] <- KeyValuePair(x, y); j <- j + 1) /// Imperative left-to-right iterators. [] diff --git a/src/fsharp/FSharp.Core/prim-types.fs b/src/fsharp/FSharp.Core/prim-types.fs index 554c19113ee..40a9e9571ae 100644 --- a/src/fsharp/FSharp.Core/prim-types.fs +++ b/src/fsharp/FSharp.Core/prim-types.fs @@ -4681,14 +4681,14 @@ namespace Microsoft.FSharp.Core else // a constrained, common simple iterator that is fast. 
let singleStepRangeEnumerator () = - let value : Ref<'T> = ref (n - LanguagePrimitives.GenericOne) + let mutable value = n - LanguagePrimitives.GenericOne let inline current () = // according to IEnumerator.Current documentation, the result of of Current // is undefined prior to the first call of MoveNext and post called to MoveNext // that return false (see https://msdn.microsoft.com/en-us/library/58e146b7%28v=vs.110%29.aspx) // so we should be able to just return value here, which would be faster - let derefValue = !value + let derefValue = value if derefValue < n then notStarted () elif derefValue > m then @@ -4704,14 +4704,14 @@ namespace Microsoft.FSharp.Core interface IEnumerator with member __.Current = box (current ()) - member __.Reset () = value := n - LanguagePrimitives.GenericOne + member __.Reset () = value <- n - LanguagePrimitives.GenericOne member __.MoveNext () = - let derefValue = !value + let derefValue = value if derefValue < m then - value := derefValue + LanguagePrimitives.GenericOne + value <- derefValue + LanguagePrimitives.GenericOne true elif derefValue = m then - value := derefValue + LanguagePrimitives.GenericOne + value <- derefValue + LanguagePrimitives.GenericOne false else false } diff --git a/src/fsharp/FSharp.Core/reflect.fs b/src/fsharp/FSharp.Core/reflect.fs index 0dd38fe2257..828fddac3d3 100644 --- a/src/fsharp/FSharp.Core/reflect.fs +++ b/src/fsharp/FSharp.Core/reflect.fs @@ -391,12 +391,12 @@ module internal Impl = | false, _ -> // the Dictionary<>s here could be ConcurrentDictionary<>'s, but then // that would lock while initializing the Type array (maybe not an issue) - let a = ref (Array.init 8 (fun i -> makeIt (i + 1))) + let mutable a = Array.init 8 (fun i -> makeIt (i + 1)) lock dictionaryLock (fun () -> match tables.TryGetValue asm with - | true, t -> a := t - | false, _ -> tables.Add(asm, !a)) - !a + | true, t -> a <- t + | false, _ -> tables.Add(asm, a)) + a | true, t -> t match tys.Length with diff --git a/src/fsharp/FSharp.Core/seq.fs b/src/fsharp/FSharp.Core/seq.fs index 0b6e7ed2e5d..71f16b6fa2e 100644 --- a/src/fsharp/FSharp.Core/seq.fs +++ b/src/fsharp/FSharp.Core/seq.fs @@ -84,13 +84,13 @@ namespace Microsoft.FSharp.Collections let mapi f (e : IEnumerator<_>) : IEnumerator<_> = let f = OptimizedClosures.FSharpFunc<_, _, _>.Adapt(f) - let i = ref (-1) + let mutable i = -1 upcast { new MapEnumerator<_>() with member __.DoMoveNext curr = - i := !i + 1 + i <- i + 1 if e.MoveNext() then - curr <- f.Invoke(!i, e.Current) + curr <- f.Invoke(i, e.Current) true else false @@ -118,13 +118,13 @@ namespace Microsoft.FSharp.Collections let mapi2 f (e1 : IEnumerator<_>) (e2 : IEnumerator<_>) : IEnumerator<_> = let f = OptimizedClosures.FSharpFunc<_, _, _, _>.Adapt(f) - let i = ref (-1) + let mutable i = -1 upcast { new MapEnumerator<_>() with member __.DoMoveNext curr = - i := !i + 1 + i <- i + 1 if (e1.MoveNext() && e2.MoveNext()) then - curr <- f.Invoke(!i, e1.Current, e2.Current) + curr <- f.Invoke(i, e1.Current, e2.Current) true else false @@ -160,11 +160,11 @@ namespace Microsoft.FSharp.Collections } let choose f (e : IEnumerator<'T>) = - let started = ref false - let curr = ref None + let mutable started = false + let mutable curr = None let get() = - check !started - match !curr with + check started + match curr with | None -> alreadyFinished() | Some x -> x @@ -173,25 +173,25 @@ namespace Microsoft.FSharp.Collections interface IEnumerator with member __.Current = box (get()) member __.MoveNext() = - if not !started then started := true - curr 
:= None - while ((!curr).IsNone && e.MoveNext()) do - curr := f e.Current - Option.isSome !curr + if not started then started <- true + curr <- None + while (curr.IsNone && e.MoveNext()) do + curr <- f e.Current + Option.isSome curr member __.Reset() = noReset() interface System.IDisposable with member __.Dispose() = e.Dispose() } let filter f (e : IEnumerator<'T>) = - let started = ref false + let mutable started = false let this = { new IEnumerator<'T> with - member __.Current = check !started; e.Current + member __.Current = check started; e.Current interface IEnumerator with - member __.Current = check !started; box e.Current + member __.Current = check started; box e.Current member __.MoveNext() = let rec next() = - if not !started then started := true + if not started then started <- true e.MoveNext() && (f e.Current || next()) next() member __.Reset() = noReset() @@ -200,15 +200,15 @@ namespace Microsoft.FSharp.Collections this let unfold f x : IEnumerator<_> = - let state = ref x + let mutable state = x upcast { new MapEnumerator<_>() with member __.DoMoveNext curr = - match f !state with + match f state with | None -> false | Some (r,s) -> curr <- r - state := s + state <- s true member __.Dispose() = () } @@ -229,34 +229,36 @@ namespace Microsoft.FSharp.Collections // The lazy creation of the cache nodes means enumerations that skip many Current values are not delayed by GC. // For example, the full enumeration of Seq.initInfinite in the tests. // state - let index = ref unstarted + let mutable index = unstarted // a Lazy node to cache the result/exception - let current = ref (Unchecked.defaultof<_>) - let setIndex i = index := i; current := (Unchecked.defaultof<_>) // cache node unprimed, initialised on demand. + let mutable current = Unchecked.defaultof<_> + let setIndex i = + index <- i + current <- (Unchecked.defaultof<_>) // cache node unprimed, initialised on demand. let getCurrent() = - if !index = unstarted then notStarted() - if !index = completed then alreadyFinished() - match box !current with - | null -> current := Lazy<_>.Create(fun () -> f !index) + if index = unstarted then notStarted() + if index = completed then alreadyFinished() + match box current with + | null -> current <- Lazy<_>.Create(fun () -> f index) | _ -> () // forced or re-forced immediately. 
- (!current).Force() + current.Force() { new IEnumerator<'U> with member __.Current = getCurrent() interface IEnumerator with member __.Current = box (getCurrent()) member __.MoveNext() = - if !index = completed then + if index = completed then false - elif !index = unstarted then + elif index = unstarted then setIndex 0 true else - if !index = System.Int32.MaxValue then raise <| System.InvalidOperationException (SR.GetString(SR.enumerationPastIntMaxValue)) - if !index = finalIndex then + if index = System.Int32.MaxValue then raise <| System.InvalidOperationException (SR.GetString(SR.enumerationPastIntMaxValue)) + if index = finalIndex then false else - setIndex (!index + 1) + setIndex (index + 1) true member __.Reset() = noReset() @@ -858,10 +860,10 @@ namespace Microsoft.FSharp.Collections let truncate count (source: seq<'T>) = checkNonNull "source" source if count <= 0 then empty else - seq { let i = ref 0 + seq { let mutable i = 0 use ie = source.GetEnumerator() - while !i < count && ie.MoveNext() do - i := !i + 1 + while i < count && ie.MoveNext() do + i <- i + 1 yield ie.Current } [] @@ -869,22 +871,22 @@ namespace Microsoft.FSharp.Collections checkNonNull "source" source seq { use ie = source.GetEnumerator() if ie.MoveNext() then - let iref = ref ie.Current + let mutable iref = ie.Current while ie.MoveNext() do let j = ie.Current - yield (!iref, j) - iref := j } + yield (iref, j) + iref <- j } [] let scan<'T,'State> folder (state:'State) (source : seq<'T>) = checkNonNull "source" source let f = OptimizedClosures.FSharpFunc<_, _, _>.Adapt folder - seq { let zref = ref state - yield !zref + seq { let mutable zref = state + yield zref use ie = source.GetEnumerator() while ie.MoveNext() do - zref := f.Invoke(!zref, ie.Current) - yield !zref } + zref <- f.Invoke(zref, ie.Current) + yield zref } [] let tryFindBack predicate (source : seq<'T>) = @@ -948,21 +950,21 @@ namespace Microsoft.FSharp.Collections [|SR.GetString SR.inputMustBePositive; windowSize|] seq { let arr = Array.zeroCreateUnchecked windowSize - let r = ref (windowSize - 1) - let i = ref 0 + let mutable r =windowSize - 1 + let mutable i = 0 use e = source.GetEnumerator() while e.MoveNext() do - arr.[!i] <- e.Current - i := (!i + 1) % windowSize - if !r = 0 then + arr.[i] <- e.Current + i <- (i + 1) % windowSize + if r = 0 then if windowSize < 32 then - yield Array.init windowSize (fun j -> arr.[(!i+j) % windowSize]) + yield Array.init windowSize (fun j -> arr.[(i+j) % windowSize]) else let result = Array.zeroCreateUnchecked windowSize - Array.Copy(arr, !i, result, 0, windowSize - !i) - Array.Copy(arr, 0, result, windowSize - !i, !i) + Array.Copy(arr, i, result, 0, windowSize - i) + Array.Copy(arr, 0, result, windowSize - i, i) yield result - else r := (!r - 1) + else r <- (r - 1) } [] @@ -978,7 +980,7 @@ namespace Microsoft.FSharp.Collections // * the prefix followed by elts from the enumerator are the initial sequence. // * the prefix contains only as many elements as the longest enumeration so far. let prefix = ResizeArray<_>() - let enumeratorR = ref None : IEnumerator<'T> option option ref // nested options rather than new type... + let enumeratorR = ref None // None = Unstarted. // Some(Some e) = Started. // Some None = Finished. 
@@ -1014,10 +1016,9 @@ namespace Microsoft.FSharp.Collections let cleanup() = lock enumeratorR (fun () -> prefix.Clear() - begin match !enumeratorR with + match !enumeratorR with | Some (Some e) -> IEnumerator.dispose e | _ -> () - end enumeratorR := None) (new CachedSeq<_>(cleanup, result) :> seq<_>) @@ -1305,9 +1306,9 @@ namespace Microsoft.FSharp.Collections let takeWhile predicate (source: seq<_>) = checkNonNull "source" source seq { use e = source.GetEnumerator() - let latest = ref Unchecked.defaultof<_> - while e.MoveNext() && (latest := e.Current; predicate !latest) do - yield !latest } + let mutable latest = Unchecked.defaultof<_> + while e.MoveNext() && (latest <- e.Current; predicate latest) do + yield latest } [] let skip count (source: seq<_>) = @@ -1324,12 +1325,12 @@ namespace Microsoft.FSharp.Collections let skipWhile predicate (source: seq<_>) = checkNonNull "source" source seq { use e = source.GetEnumerator() - let latest = ref (Unchecked.defaultof<_>) - let ok = ref false + let mutable latest = Unchecked.defaultof<_> + let mutable ok = false while e.MoveNext() do - if (latest := e.Current; (!ok || not (predicate !latest))) then - ok := true - yield !latest } + if (latest <- e.Current; (ok || not (predicate latest))) then + ok <- true + yield latest } [] let forall2 predicate (source1: seq<_>) (source2: seq<_>) = @@ -1469,14 +1470,14 @@ namespace Microsoft.FSharp.Collections let nextChunk() = let res = Array.zeroCreateUnchecked chunkSize res.[0] <- e.Current - let i = ref 1 - while !i < chunkSize && e.MoveNext() do - res.[!i] <- e.Current - i := !i + 1 - if !i = chunkSize then + let mutable i = 1 + while i < chunkSize && e.MoveNext() do + res.[i] <- e.Current + i <- i + 1 + if i = chunkSize then res else - res |> Array.subUnchecked 0 !i + res |> Array.subUnchecked 0 i while e.MoveNext() do yield nextChunk() } diff --git a/src/fsharp/FSharp.Core/seqcore.fs b/src/fsharp/FSharp.Core/seqcore.fs index 05bde3b90d6..bc54d01e670 100644 --- a/src/fsharp/FSharp.Core/seqcore.fs +++ b/src/fsharp/FSharp.Core/seqcore.fs @@ -68,16 +68,16 @@ namespace Microsoft.FSharp.Collections lock r (fun () -> match !r with None -> None | Some _ as res -> r := None; res) let generateWhileSome openf compute closef : IEnumerator<'U> = - let started = ref false - let curr = ref None + let mutable started = false + let mutable curr = None let state = ref (Some(openf())) let getCurr() = - check !started - match !curr with None -> alreadyFinished() | Some x -> x - let start() = if not !started then (started := true) + check started + match curr with None -> alreadyFinished() | Some x -> x + let start() = if not started then (started <- true) let dispose() = readAndClear state |> Option.iter closef - let finish() = try dispose() finally curr := None + let finish() = try dispose() finally curr <- None { new IEnumerator<'U> with member __.Current = getCurr() interface IEnumerator with @@ -89,7 +89,7 @@ namespace Microsoft.FSharp.Collections | Some s -> match (try compute s with e -> finish(); reraise()) with | None -> finish(); false - | Some _ as x -> curr := x; true + | Some _ as x -> curr <- x; true member __.Reset() = noReset() interface System.IDisposable with @@ -306,14 +306,14 @@ namespace Microsoft.FSharp.Core.CompilerServices mkSeq (fun () -> new ConcatEnumerator<_,_>(sources) :> IEnumerator<'T>) let EnumerateWhile (guard: unit -> bool) (source: seq<'T>) : seq<'T> = - let started = ref false - let curr = ref None + let mutable started = false + let mutable curr = None let getCurr() = - 
IEnumerator.check !started - match !curr with None -> IEnumerator.alreadyFinished() | Some x -> x - let start() = if not !started then (started := true) + IEnumerator.check started + match curr with None -> IEnumerator.alreadyFinished() | Some x -> x + let start() = if not started then (started <- true) - let finish() = (curr := None) + let finish() = (curr <- None) mkConcatSeq (mkSeq (fun () -> { new IEnumerator<_> with @@ -324,7 +324,7 @@ namespace Microsoft.FSharp.Core.CompilerServices start() let keepGoing = (try guard() with e -> finish (); reraise ()) in if keepGoing then - curr := Some(source); true + curr <- Some(source); true else finish(); false member x.Reset() = IEnumerator.noReset() diff --git a/src/fsharp/FSharp.Core/set.fs b/src/fsharp/FSharp.Core/set.fs index 5da5152f296..bd6bb970002 100644 --- a/src/fsharp/FSharp.Core/set.fs +++ b/src/fsharp/FSharp.Core/set.fs @@ -429,13 +429,13 @@ module internal SetTree = not i.stack.IsEmpty let mkIEnumerator s = - let i = ref (mkIterator s) + let mutable i = mkIterator s { new IEnumerator<_> with - member __.Current = current !i + member __.Current = current i interface IEnumerator with - member __.Current = box (current !i) - member __.MoveNext() = moveNext !i - member __.Reset() = i := mkIterator s + member __.Current = box (current i) + member __.MoveNext() = moveNext i + member __.Reset() = i <- mkIterator s interface System.IDisposable with member __.Dispose() = () } @@ -486,8 +486,8 @@ module internal SetTree = loop s [] let copyToArray s (arr: _[]) i = - let j = ref i - iter (fun x -> arr.[!j] <- x; j := !j + 1) s + let mutable j = i + iter (fun x -> arr.[j] <- x; j <- j + 1) s let toArray s = let n = (count s) diff --git a/src/fsharp/LegacyHostedCompilerForTesting.fs b/src/fsharp/LegacyHostedCompilerForTesting.fs index b793915b5c8..821882d84d5 100644 --- a/src/fsharp/LegacyHostedCompilerForTesting.fs +++ b/src/fsharp/LegacyHostedCompilerForTesting.fs @@ -61,20 +61,20 @@ type internal InProcCompiler(legacyReferenceResolver) = let ctok = AssumeCompilationThreadWithoutEvidence () let loggerProvider = InProcErrorLoggerProvider() - let exitCode = ref 0 + let mutable exitCode = 0 let exiter = { new Exiter with - member this.Exit n = exitCode := n; raise StopProcessing } + member this.Exit n = exitCode <- n; raise StopProcessing } try typecheckAndCompile(ctok, argv, legacyReferenceResolver, false, ReduceMemoryFlag.Yes, CopyFSharpCoreFlag.Yes, exiter, loggerProvider.Provider, None, None) with | StopProcessing -> () | ReportedError _ | WrappedError(ReportedError _,_) -> - exitCode := 1 + exitCode <- 1 () let output : CompilationOutput = { Warnings = loggerProvider.CapturedWarnings; Errors = loggerProvider.CapturedErrors } - !exitCode = 0, output + exitCode = 0, output /// in-proc version of fsc.exe type internal FscCompiler(legacyReferenceResolver) = diff --git a/src/fsharp/LegacyMSBuildReferenceResolver.fs b/src/fsharp/LegacyMSBuildReferenceResolver.fs index 8425f6197ba..a07941509ff 100644 --- a/src/fsharp/LegacyMSBuildReferenceResolver.fs +++ b/src/fsharp/LegacyMSBuildReferenceResolver.fs @@ -255,12 +255,12 @@ module LegacyMSBuildReferenceResolver "Software\Microsoft\.NetFramework", "AssemblyFoldersEx" , "" if Array.isEmpty references then [| |] else - let backgroundException = ref false + let mutable backgroundException = false let protect f = - if not !backgroundException then + if not backgroundException then try f() - with _ -> backgroundException := true + with _ -> backgroundException <- true let engine = { new IBuildEngine with 
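
The F# hunks above all apply the same mechanical rewrite: local `ref` cells become `let mutable` bindings, `!r` reads become direct reads, and `r := v` writes become `r <- v`, which removes a small heap allocation and a pointer indirection per access. A minimal sketch of the pattern (illustrative only; these function names do not appear in the patch):

// Before: a heap-allocated ref cell, read with (!) and written with (:=).
let sumBefore (xs: int list) =
    let total = ref 0
    for x in xs do
        total := !total + x
    !total

// After: a mutable local, read directly and written with (<-);
// same behaviour, no extra allocation, no indirection on each access.
let sumAfter (xs: int list) =
    let mutable total = 0
    for x in xs do
        total <- total + x
    total

The LexFilter.fs changes that follow go a step further on the lexer hot path: TokenTup fields become mutable and instances are recycled through a small fixed-size pool (Rent/Return) instead of allocating a new TokenTup for every token.
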
diff --git a/src/fsharp/LexFilter.fs b/src/fsharp/LexFilter.fs index a998dfd6994..e7630a84ff4 100755 --- a/src/fsharp/LexFilter.fs +++ b/src/fsharp/LexFilter.fs @@ -399,39 +399,66 @@ type LexbufState(startPos: Position, member x.EndPos = endPos member x.PastEOF = pastEOF -[] -type PositionTuple = - val X: Position - val Y: Position - new (x: Position, y: Position) = { X = x; Y = y } - /// Used to save the state related to a token +/// Treat as though this is read-only. [] type TokenTup = - val Token : token - val LexbufState : LexbufState - val LastTokenPos: PositionTuple + // This is mutable for performance reasons. + val mutable Token : token + val mutable LexbufState : LexbufState + val mutable LastTokenPos: Position new (token, state, lastTokenPos) = { Token=token; LexbufState=state;LastTokenPos=lastTokenPos } /// Returns starting position of the token member x.StartPos = x.LexbufState.StartPos /// Returns end position of the token member x.EndPos = x.LexbufState.EndPos - + +type TokenTupPool() = + + /// Arbitrary. + /// When parsing the compiler's source files, the pool didn't come close to reaching this limit. + /// Therefore, this seems like a reasonable limit to handle 99% of cases. + [] + let maxSize = 100 + + let stack = System.Collections.Generic.Stack(Array.init maxSize (fun _ -> TokenTup(Unchecked.defaultof<_>, Unchecked.defaultof<_>, Unchecked.defaultof<_>))) + + member _.Rent() = + if stack.Count = 0 then + assert false + TokenTup(Unchecked.defaultof<_>, Unchecked.defaultof<_>, Unchecked.defaultof<_>) + else + stack.Pop() + + member _.Return(x: TokenTup) = + x.Token <- Unchecked.defaultof<_> + x.LexbufState <- Unchecked.defaultof<_> + x.LastTokenPos <- Unchecked.defaultof<_> + if stack.Count >= maxSize then + assert false + else + stack.Push x + /// Returns a token 'tok' with the same position as this token - member x.UseLocation tok = + member pool.UseLocation(x: TokenTup, tok) = let tokState = x.LexbufState - TokenTup(tok, LexbufState(tokState.StartPos, tokState.EndPos, false), x.LastTokenPos) + let tokTup = pool.Rent() + tokTup.Token <- tok + tokTup.LexbufState <- LexbufState(tokState.StartPos, tokState.EndPos, false) + tokTup.LastTokenPos <- x.LastTokenPos + tokTup /// Returns a token 'tok' with the same position as this token, except that /// it is shifted by specified number of characters from the left and from the right /// Note: positive value means shift to the right in both cases - member x.UseShiftedLocation(tok, shiftLeft, shiftRight) = - let tokState = x.LexbufState - TokenTup(tok, LexbufState(tokState.StartPos.ShiftColumnBy shiftLeft, - tokState.EndPos.ShiftColumnBy shiftRight, false), x.LastTokenPos) - - + member pool.UseShiftedLocation(x: TokenTup, tok, shiftLeft, shiftRight) = + let tokState = x.LexbufState + let tokTup = pool.Rent() + tokTup.Token <- tok + tokTup.LexbufState <- LexbufState(tokState.StartPos.ShiftColumnBy shiftLeft, tokState.EndPos.ShiftColumnBy shiftRight, false) + tokTup.LastTokenPos <- x.LastTokenPos + tokTup //---------------------------------------------------------------------------- // Utilities for the tokenizer that are needed in other places @@ -541,6 +568,12 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, | Parser.EOF _ -> tokenTup.LexbufState.StartPos.ColumnMinusOne | _ -> tokenTup.LexbufState.StartPos + //---------------------------------------------------------------------------- + // TokenTup pool + //-------------------------------------------------------------------------- + + let pool = 
TokenTupPool() + //---------------------------------------------------------------------------- // Part II. The state of the new lex stream object. //-------------------------------------------------------------------------- @@ -552,7 +585,6 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, let runWrappedLexerInConsistentLexbufState() = let state = if haveLexbufState then savedLexbufState else getLexbufState() setLexbufState state - let lastTokenStart = state.StartPos let lastTokenEnd = state.EndPos let token = lexer lexbuf // Now we've got the token, remember the lexbuf state, associating it with the token @@ -560,7 +592,12 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, let tokenLexbufState = getLexbufState() savedLexbufState <- tokenLexbufState haveLexbufState <- true - TokenTup(token, tokenLexbufState, PositionTuple(lastTokenStart, lastTokenEnd)) + + let tokenTup = pool.Rent() + tokenTup.Token <- token + tokenTup.LexbufState <- tokenLexbufState + tokenTup.LastTokenPos <- lastTokenEnd + tokenTup //---------------------------------------------------------------------------- // Fetch a raw token, either from the old lexer or from our delayedStack @@ -870,11 +907,11 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, | INFIX_COMPARE_OP " let tokenEndPos = tokenTup.LexbufState.EndPos if isAdjacent tokenTup lookaheadTokenTup then - let stack = ref [] + let mutable stack = [] let rec scanAhead nParen = let lookaheadTokenTup = popNextTokenTup() let lookaheadToken = lookaheadTokenTup.Token - stack := (lookaheadTokenTup, true) :: !stack + stack <- (lookaheadTokenTup, true) :: stack let lookaheadTokenStartPos = startPosOfTokenTup lookaheadTokenTup match lookaheadToken with | Parser.EOF _ | SEMICOLON_SEMICOLON -> false @@ -890,7 +927,7 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, let hasAfterOp = (match lookaheadToken with GREATER _ -> false | _ -> true) if nParen > 0 then // Don't smash the token if there is an after op and we're in a nested paren - stack := (lookaheadTokenTup, not hasAfterOp) :: (!stack).Tail + stack <- (lookaheadTokenTup, not hasAfterOp) :: stack.Tail scanAhead nParen else // On successful parse of a set of type parameters, look for an adjacent (, e.g. @@ -898,13 +935,13 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, // and insert a HIGH_PRECEDENCE_PAREN_APP if not hasAfterOp && (match nextTokenIsAdjacentLParenOrLBrack lookaheadTokenTup with Some LPAREN -> true | _ -> false) then let dotTokenTup = peekNextTokenTup() - stack := (dotTokenTup.UseLocation(HIGH_PRECEDENCE_PAREN_APP), false) :: !stack + stack <- (pool.UseLocation(dotTokenTup, HIGH_PRECEDENCE_PAREN_APP), false) :: stack true | INFIX_COMPARE_OP (TyparsCloseOp(greaters, afterOp)) -> let nParen = nParen - greaters.Length if nParen > 0 then // Don't smash the token if there is an after op and we're in a nested paren - stack := (lookaheadTokenTup, not afterOp.IsSome) :: (!stack).Tail + stack <- (lookaheadTokenTup, not afterOp.IsSome) :: stack.Tail scanAhead nParen else // On successful parse of a set of type parameters, look for an adjacent (, e.g. 
@@ -912,7 +949,7 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, // and insert a HIGH_PRECEDENCE_PAREN_APP if afterOp.IsNone && (match nextTokenIsAdjacentLParenOrLBrack lookaheadTokenTup with Some LPAREN -> true | _ -> false) then let dotTokenTup = peekNextTokenTup() - stack := (dotTokenTup.UseLocation(HIGH_PRECEDENCE_PAREN_APP), false) :: !stack + stack <- (pool.UseLocation(dotTokenTup, HIGH_PRECEDENCE_PAREN_APP), false) :: stack true | (LPAREN | LESS _ | LBRACK | LBRACK_LESS | INFIX_COMPARE_OP " scanAhead (nParen+1) @@ -963,26 +1000,31 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, let res = scanAhead 0 // Put the tokens back on and smash them up if needed - !stack |> List.iter (fun (tokenTup, smash) -> + stack |> List.iter (fun (tokenTup, smash) -> if smash then match tokenTup.Token with | INFIX_COMPARE_OP " - delayToken (tokenTup.UseShiftedLocation(INFIX_STAR_DIV_MOD_OP "/", 1, 0)) - delayToken (tokenTup.UseShiftedLocation(LESS res, 0, -1)) + delayToken (pool.UseShiftedLocation(tokenTup, INFIX_STAR_DIV_MOD_OP "/", 1, 0)) + delayToken (pool.UseShiftedLocation(tokenTup, LESS res, 0, -1)) + pool.Return tokenTup | GREATER_BAR_RBRACK -> - delayToken (tokenTup.UseShiftedLocation(BAR_RBRACK, 1, 0)) - delayToken (tokenTup.UseShiftedLocation(GREATER res, 0, -2)) + delayToken (pool.UseShiftedLocation(tokenTup, BAR_RBRACK, 1, 0)) + delayToken (pool.UseShiftedLocation(tokenTup, GREATER res, 0, -2)) + pool.Return tokenTup | GREATER_RBRACK -> - delayToken (tokenTup.UseShiftedLocation(RBRACK, 1, 0)) - delayToken (tokenTup.UseShiftedLocation(GREATER res, 0, -1)) + delayToken (pool.UseShiftedLocation(tokenTup, RBRACK, 1, 0)) + delayToken (pool.UseShiftedLocation(tokenTup, GREATER res, 0, -1)) + pool.Return tokenTup | GREATER _ -> - delayToken (tokenTup.UseLocation(GREATER res)) + delayToken (pool.UseLocation(tokenTup, GREATER res)) + pool.Return tokenTup | (INFIX_COMPARE_OP (TyparsCloseOp(greaters, afterOp) as opstr)) -> match afterOp with | None -> () - | Some tok -> delayToken (tokenTup.UseShiftedLocation(tok, greaters.Length, 0)) + | Some tok -> delayToken (pool.UseShiftedLocation(tokenTup, tok, greaters.Length, 0)) for i = greaters.Length - 1 downto 0 do - delayToken (tokenTup.UseShiftedLocation(greaters.[i] res, i, -opstr.Length + i + 1)) + delayToken (pool.UseShiftedLocation(tokenTup, greaters.[i] res, i, -opstr.Length + i + 1)) + pool.Return tokenTup | _ -> delayToken tokenTup else delayToken tokenTup) @@ -1109,7 +1151,7 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, // span of inserted token lasts from the col + 1 of the prev token // to the beginning of current token let lastTokenPos = - let pos = tokenTup.LastTokenPos.Y + let pos = tokenTup.LastTokenPos pos.ShiftColumnBy 1 returnToken (lexbufStateForInsertedDummyTokens (lastTokenPos, tokenTup.LexbufState.StartPos)) tok @@ -1235,7 +1277,7 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, popCtxt() effectsToDo <- (fun() -> if debug then dprintf "--> because %s is coming, inserting OBLOCKEND\n" keywordName - delayTokenNoProcessing (tokenTup.UseLocation OBLOCKEND)) :: effectsToDo + delayTokenNoProcessing (pool.UseLocation(tokenTup, OBLOCKEND))) :: effectsToDo | CtxtSeqBlock(_, _, NoAddBlockEnd) -> if debug then dprintf "--> because %s is coming, popping CtxtSeqBlock\n" keywordName popCtxt() @@ -1243,7 +1285,7 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, popCtxt() 
effectsToDo <- (fun() -> if debug then dprintf "--> because %s is coming, inserting ORIGHT_BLOCK_END\n" keywordName - delayTokenNoProcessing (tokenTup.UseLocation(ORIGHT_BLOCK_END))) :: effectsToDo + delayTokenNoProcessing (pool.UseLocation(tokenTup, ORIGHT_BLOCK_END))) :: effectsToDo | CtxtVanilla _ -> if debug then dprintf "--> because %s is coming, popping CtxtVanilla\n" keywordName popCtxt() @@ -1253,12 +1295,16 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, // Why _six_ TYPE_COMING_SOON? It's rather arbitrary, this means we can recover from up to six unmatched parens before failing. The unit tests (with 91609 in the name) demonstrate this. // Don't "delayToken tokenTup", we are replacing it, so consume it. if debug then dprintf "inserting 6 copies of %+A before %+A\n" comingSoon isHere - delayTokenNoProcessing (tokenTup.UseLocation isHere) + delayTokenNoProcessing (pool.UseLocation(tokenTup, isHere)) for i in 1..6 do - delayTokenNoProcessing (tokenTup.UseLocation comingSoon) + delayTokenNoProcessing (pool.UseLocation(tokenTup, comingSoon)) for e in List.rev effectsToDo do e() // push any END tokens after pushing the TYPE_IS_HERE and TYPE_COMING_SOON stuff, so that they come before those in the token stream + let returnToken tokenLexbufState token = + pool.Return tokenTup + returnToken tokenLexbufState token + match token, offsideStack with // inserted faux tokens need no other processing | _ when tokensThatNeedNoProcessingCount > 0 -> @@ -1279,7 +1325,7 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, // reset on ';;' rule. A ';;' terminates ALL entries | SEMICOLON_SEMICOLON, [] -> if debug then dprintf ";; scheduling a reset\n" - delayToken(tokenTup.UseLocation ORESET) + delayToken(pool.UseLocation(tokenTup, ORESET)) returnToken tokenLexbufState SEMICOLON_SEMICOLON | ORESET, [] -> @@ -1287,6 +1333,7 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, // NOTE: The parser thread of F# Interactive will often be blocked on this call, e.g. after an entry has been // processed and we're waiting for the first token of the next entry. peekInitial() |> ignore + pool.Return tokenTup hwTokenFetch true @@ -1300,7 +1347,7 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, if tokenStartCol < offsidePos.Column then warn tokenTup (FSComp.SR.lexfltIncorrentIndentationOfIn()) popCtxt() // Make sure we queue a dummy token at this position to check if any other pop rules apply - delayToken(tokenTup.UseLocation(ODUMMY token)) + delayToken(pool.UseLocation(tokenTup, ODUMMY token)) returnToken tokenLexbufState (if blockLet then ODECLEND else token) // Balancing rule. Encountering a 'done' balances with a 'do'. i.e. even a non-offside 'done' closes a 'do' @@ -1309,7 +1356,8 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, if debug then dprintf "DONE at %a terminates CtxtDo(offsidePos=%a)\n" outputPos tokenStartPos outputPos offsidePos popCtxt() // reprocess as the DONE may close a DO context - delayToken(tokenTup.UseLocation ODECLEND) + delayToken(pool.UseLocation(tokenTup, ODECLEND)) + pool.Return tokenTup hwTokenFetch useBlockRule // Balancing rule. 
Encountering a ')' or '}' balances with a '(' or '{', even if not offside @@ -1318,7 +1366,7 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, if debug then dprintf "RPAREN/RBRACE/BAR_RBRACE/RBRACK/BAR_RBRACK/RQUOTE/END at %a terminates CtxtParen()\n" outputPos tokenStartPos popCtxt() // Queue a dummy token at this position to check if any closing rules apply - delayToken(tokenTup.UseLocation(ODUMMY token)) + delayToken(pool.UseLocation(tokenTup, ODUMMY token)) returnToken tokenLexbufState token // Balancing rule. Encountering a 'end' can balance with a 'with' but only when not offside @@ -1326,7 +1374,7 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, when not (tokenStartCol + 1 <= offsidePos.Column) -> if debug then dprintf "END at %a terminates CtxtWithAsAugment()\n" outputPos tokenStartPos popCtxt() - delayToken(tokenTup.UseLocation(ODUMMY token)) // make sure we queue a dummy token at this position to check if any closing rules apply + delayToken(pool.UseLocation(tokenTup, ODUMMY token)) // make sure we queue a dummy token at this position to check if any closing rules apply returnToken tokenLexbufState OEND // Transition rule. CtxtNamespaceHead ~~~> CtxtSeqBlock @@ -1698,6 +1746,7 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, insertComingSoonTokens("MODULE", MODULE_COMING_SOON, MODULE_IS_HERE) if debug then dprintf "MODULE: entering CtxtModuleHead, awaiting EQUALS to go to CtxtSeqBlock (%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtModuleHead (tokenStartPos, token)) + pool.Return tokenTup hwTokenFetch useBlockRule // exception ... ~~~> CtxtException @@ -1746,7 +1795,7 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, | Some tok -> popCtxt() if debug then dprintf "--> inserting %+A\n" tok - delayTokenNoProcessing (tokenTup.UseLocation tok) + delayTokenNoProcessing (pool.UseLocation(tokenTup, tok)) // for the rest, we silently pop them | _ -> popCtxt() popCtxt() // pop CtxtMemberBody @@ -2034,7 +2083,7 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, // if e1 then e2 // else if e3 then e4 // else if e5 then e6 - let _ = popNextTokenTup() + popNextTokenTup() |> pool.Return if debug then dprintf "ELSE IF: replacing ELSE IF with ELIF, pushing CtxtIf, CtxtVanilla(%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtIf tokenStartPos) returnToken tokenLexbufState ELIF @@ -2102,6 +2151,7 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, insertComingSoonTokens("TYPE", TYPE_COMING_SOON, TYPE_IS_HERE) if debug then dprintf "TYPE, pushing CtxtTypeDefns(%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtTypeDefns tokenStartPos) + pool.Return tokenTup hwTokenFetch useBlockRule | TRY, _ -> @@ -2120,6 +2170,7 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, | ODUMMY(_), _ -> if debug then dprintf "skipping dummy token as no offside rules apply\n" + pool.Return tokenTup hwTokenFetch (useBlockRule) // Ordinary tokens start a vanilla block @@ -2142,32 +2193,35 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, | Some LPAREN -> HIGH_PRECEDENCE_PAREN_APP | Some LBRACK -> HIGH_PRECEDENCE_BRACK_APP | _ -> failwith "unreachable" - delayToken(dotTokenTup.UseLocation hpa) + delayToken(pool.UseLocation(dotTokenTup, hpa)) delayToken tokenTup true // Insert HIGH_PRECEDENCE_TYAPP if needed | (DELEGATE | IDENT _ | IEEE64 _ | 
IEEE32 _ | DECIMAL _ | INT8 _ | INT16 _ | INT32 _ | INT64 _ | NATIVEINT _ | UINT8 _ | UINT16 _ | UINT32 _ | UINT64 _ | BIGNUM _) when peekAdjacentTypars false tokenTup -> let lessTokenTup = popNextTokenTup() - delayToken (lessTokenTup.UseLocation(match lessTokenTup.Token with LESS _ -> LESS true | _ -> failwith "unreachable")) + delayToken (pool.UseLocation(lessTokenTup, match lessTokenTup.Token with LESS _ -> LESS true | _ -> failwith "unreachable")) if debug then dprintf "softwhite inserting HIGH_PRECEDENCE_TYAPP at dotTokenPos = %a\n" outputPos (startPosOfTokenTup lessTokenTup) - delayToken (lessTokenTup.UseLocation(HIGH_PRECEDENCE_TYAPP)) + delayToken (pool.UseLocation(lessTokenTup, HIGH_PRECEDENCE_TYAPP)) delayToken (tokenTup) + pool.Return lessTokenTup true // Split this token to allow "1..2" for range specification | INT32_DOT_DOT (i, v) -> let dotDotPos = new LexbufState(tokenTup.EndPos.ShiftColumnBy(-2), tokenTup.EndPos, false) - delayToken(new TokenTup(DOT_DOT, dotDotPos, tokenTup.LastTokenPos)) - delayToken(tokenTup.UseShiftedLocation(INT32(i, v), 0, -2)) + delayToken(let rented = pool.Rent() in rented.Token <- DOT_DOT; rented.LexbufState <- dotDotPos; rented.LastTokenPos <- tokenTup.LastTokenPos; rented) + delayToken(pool.UseShiftedLocation(tokenTup, INT32(i, v), 0, -2)) + pool.Return tokenTup true // Split @>. and @@>. into two | RQUOTE_DOT (s, raw) -> let dotPos = new LexbufState(tokenTup.EndPos.ShiftColumnBy(-1), tokenTup.EndPos, false) - delayToken(new TokenTup(DOT, dotPos, tokenTup.LastTokenPos)) - delayToken(tokenTup.UseShiftedLocation(RQUOTE(s, raw), 0, -1)) + delayToken(let rented = pool.Rent() in rented.Token <- DOT; rented.LexbufState <- dotPos; rented.LastTokenPos <- tokenTup.LastTokenPos; rented) + delayToken(pool.UseShiftedLocation(tokenTup, RQUOTE(s, raw), 0, -1)) + pool.Return tokenTup true | MINUS | PLUS_MINUS_OP _ | PERCENT_OP _ | AMP | AMP_AMP @@ -2176,7 +2230,7 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, | PERCENT_OP s -> (s = "%") || (s = "%%") | _ -> true) && nextTokenIsAdjacent tokenTup && - not (prevWasAtomicEnd && (tokenTup.LastTokenPos.Y = startPosOfTokenTup tokenTup))) -> + not (prevWasAtomicEnd && (tokenTup.LastTokenPos = startPosOfTokenTup tokenTup))) -> let plus = match tokenTup.Token with @@ -2189,7 +2243,10 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, | _ -> false let nextTokenTup = popNextTokenTup() /// Merge the location of the prefix token and the literal - let delayMergedToken tok = delayToken(new TokenTup(tok, new LexbufState(tokenTup.LexbufState.StartPos, nextTokenTup.LexbufState.EndPos, nextTokenTup.LexbufState.PastEOF), tokenTup.LastTokenPos)) + let delayMergedToken tok = + delayToken(let rented = pool.Rent() in rented.Token <- tok; rented.LexbufState <- new LexbufState(tokenTup.LexbufState.StartPos, nextTokenTup.LexbufState.EndPos, nextTokenTup.LexbufState.PastEOF); rented.LastTokenPos <- tokenTup.LastTokenPos; rented) + pool.Return nextTokenTup + pool.Return tokenTup let noMerge() = let tokenName = match tokenTup.Token with @@ -2201,7 +2258,8 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, compilingFsLib, lexer, | _ -> failwith "unreachable" let token = ADJACENT_PREFIX_OP tokenName delayToken nextTokenTup - delayToken (tokenTup.UseLocation token) + delayToken (pool.UseLocation(tokenTup, token)) + pool.Return tokenTup if plusOrMinus then match nextTokenTup.Token with @@ -2227,14 +2285,18 @@ type LexFilterImpl (lightSyntaxStatus: LightSyntaxStatus, 
compilingFsLib, lexer, and pushCtxtSeqBlockAt(p: TokenTup, addBlockBegin, addBlockEnd) = if addBlockBegin then if debug then dprintf "--> insert OBLOCKBEGIN \n" - delayToken(p.UseLocation OBLOCKBEGIN) + delayToken(pool.UseLocation(p, OBLOCKBEGIN)) pushCtxt p (CtxtSeqBlock(FirstInSeqBlock, startPosOfTokenTup p, addBlockEnd)) let rec swTokenFetch() = let tokenTup = popNextTokenTup() let tokenReplaced = rulesForBothSoftWhiteAndHardWhite tokenTup if tokenReplaced then swTokenFetch() - else returnToken tokenTup.LexbufState tokenTup.Token + else + let lexbufState = tokenTup.LexbufState + let tok = tokenTup.Token + pool.Return tokenTup + returnToken lexbufState tok //---------------------------------------------------------------------------- // Part VI. Publish the new lexer function. diff --git a/src/fsharp/NameResolution.fs b/src/fsharp/NameResolution.fs index 389c6e16b04..30f456fd86c 100644 --- a/src/fsharp/NameResolution.fs +++ b/src/fsharp/NameResolution.fs @@ -2528,7 +2528,7 @@ let rec ResolveExprLongIdentPrim sink (ncenv: NameResolver) first fullyQualified ResolveExprLongIdentPrim sink ncenv false FullyQualified m ad nenv typeNameResInfo id2 rest2 isOpenDecl else if isNil rest && fullyQualified <> FullyQualified then - let typeError = ref None + let mutable typeError = None // Single identifier. Lookup the unqualified names in the environment let envSearch = match nenv.eUnqualifiedItems.TryGetValue id.idText with @@ -2549,7 +2549,7 @@ let rec ResolveExprLongIdentPrim sink (ncenv: NameResolver) first fullyQualified let resInfo, item, rest = ForceRaise res ResolutionInfo.SendEntityPathToSink(sink, ncenv, nenv, ItemOccurence.Use, ad, resInfo, ResultTyparChecker(fun () -> CheckAllTyparsInferrable ncenv.amap m item)) Some(item, rest) - | Exception e -> typeError := Some e; None + | Exception e -> typeError <- Some e; None | true, res -> let fresh = FreshenUnqualifiedItem ncenv m res @@ -2587,7 +2587,7 @@ let rec ResolveExprLongIdentPrim sink (ncenv: NameResolver) first fullyQualified | Result _ as res -> ForceRaise res | _ -> let failingCase = - match !typeError with + match typeError with | Some e -> raze e | _ -> let suggestNamesAndTypes (addToBuffer: string -> unit) = diff --git a/src/fsharp/PatternMatchCompilation.fs b/src/fsharp/PatternMatchCompilation.fs index 5a8eaa6534c..6c5cc67e5ac 100644 --- a/src/fsharp/PatternMatchCompilation.fs +++ b/src/fsharp/PatternMatchCompilation.fs @@ -103,16 +103,16 @@ let BindSubExprOfInput g amap gtps (PBind(v, tyscheme)) m (SubExpr(accessf, (ve2 accessf [] ve2 else let tyargs = - let someSolved = ref false + let mutable someSolved = false let freezeVar gtp = if isBeingGeneralized gtp tyscheme then mkTyparTy gtp else - someSolved := true + someSolved <- true TypeRelations.ChooseTyparSolution g amap gtp let solutions = List.map freezeVar gtps - if !someSolved then + if someSolved then TypeRelations.IterativelySubstituteTyparSolutions g gtps solutions else solutions @@ -1268,10 +1268,10 @@ let CompilePatternBasic BindProjectionPatterns newActives s | TPat_range (c1, c2, m) -> - let res = ref [] + let mutable res = [] for i = int c1 to int c2 do - res := BindProjectionPattern (Active(path, subExpr, TPat_const(Const.Char(char i), m))) s @ !res - !res + res <- BindProjectionPattern (Active(path, subExpr, TPat_const(Const.Char(char i), m))) s @ res + res // Assign an identifier to each TPat_query based on our knowledge of the 'identity' of the active pattern, if any | TPat_query ((_, _, apatVrefOpt, _, _), _, _) -> let uniqId = diff --git 
a/src/fsharp/TastOps.fs b/src/fsharp/TastOps.fs index 94d0bcffaef..fe30b314d15 100644 --- a/src/fsharp/TastOps.fs +++ b/src/fsharp/TastOps.fs @@ -3348,7 +3348,7 @@ module DebugPrint = | TType_measure unt -> #if DEBUG leftL (tagText "{") ^^ - (match !global_g with + (match global_g with | None -> wordL (tagText "") | Some g -> let sortVars (vs:(Typar * Rational) list) = vs |> List.sortBy (fun (v, _) -> v.DisplayName) @@ -3407,7 +3407,7 @@ module DebugPrint = and auxTraitL env (ttrait: TraitConstraintInfo) = #if DEBUG let (TTrait(tys, nm, memFlags, argtys, rty, _)) = ttrait - match !global_g with + match global_g with | None -> wordL (tagText "") | Some g -> let rty = GetFSharpViewOfReturnType g rty @@ -3527,7 +3527,7 @@ module DebugPrint = let slotSigL (slotsig: SlotSig) = #if DEBUG let (TSlotSig(nm, ty, tps1, tps2, pms, rty)) = slotsig - match !global_g with + match global_g with | None -> wordL(tagText "") | Some g -> let rty = GetFSharpViewOfReturnType g rty @@ -6494,10 +6494,10 @@ let FoldImplFile folders state implFile = ExprFolders(folders).FoldImplFile stat //------------------------------------------------------------------------- let ExprStats x = - let count = ref 0 - let folders = {ExprFolder0 with exprIntercept = (fun _ noInterceptF z x -> (count := !count + 1; noInterceptF z x))} + let mutable count = 0 + let folders = {ExprFolder0 with exprIntercept = (fun _ noInterceptF z x -> (count <- count + 1; noInterceptF z x))} let () = FoldExpr folders () x - string !count + " TExpr nodes" + string count + " TExpr nodes" #endif //------------------------------------------------------------------------- diff --git a/src/fsharp/TastPickle.fs b/src/fsharp/TastPickle.fs index 5098b16ecd0..5b0acbc5174 100644 --- a/src/fsharp/TastPickle.fs +++ b/src/fsharp/TastPickle.fs @@ -554,12 +554,12 @@ let p_maybe_lazy p (x: MaybeLazy<_>) st = p_lazy_impl p x.Value st let p_hole () = - let h = ref (None : ('T -> WriterState -> unit) option) - (fun f -> h := Some f), (fun x st -> match !h with Some f -> f x st | None -> pfailwith st "p_hole: unfilled hole") + let mutable h = None + (fun f -> h <- Some f), (fun x st -> match h with Some f -> f x st | None -> pfailwith st "p_hole: unfilled hole") let p_hole2 () = - let h = ref (None : ('Arg -> 'T -> WriterState -> unit) option) - (fun f -> h := Some f), (fun arg x st -> match !h with Some f -> f arg x st | None -> pfailwith st "p_hole2: unfilled hole") + let mutable h = None + (fun f -> h <- Some f), (fun arg x st -> match h with Some f -> f arg x st | None -> pfailwith st "p_hole2: unfilled hole") let u_array_core f n st = let res = Array.zeroCreate n @@ -675,8 +675,8 @@ let u_lazy u st = let u_hole () = - let h = ref (None : 'T unpickler option) - (fun f -> h := Some f), (fun st -> match !h with Some f -> f st | None -> ufailwith st "u_hole: unfilled hole") + let mutable h = None + (fun f -> h <- Some f), (fun st -> match h with Some f -> f st | None -> ufailwith st "u_hole: unfilled hole") //--------------------------------------------------------------------------- // Pickle/unpickle F# interface data diff --git a/src/fsharp/TcGlobals.fs b/src/fsharp/TcGlobals.fs index 339e8b4b7fe..c70a378f6f2 100755 --- a/src/fsharp/TcGlobals.fs +++ b/src/fsharp/TcGlobals.fs @@ -1491,5 +1491,5 @@ type public TcGlobals(compilingFslib: bool, ilg:ILGlobals, fslibCcu: CcuThunk, d #if DEBUG // This global is only used during debug output -let global_g = ref (None : TcGlobals option) +let mutable global_g = None : TcGlobals option #endif diff --git 
a/src/fsharp/TypeChecker.fs b/src/fsharp/TypeChecker.fs index 5c5dae51b8d..76efb55e44e 100644 --- a/src/fsharp/TypeChecker.fs +++ b/src/fsharp/TypeChecker.fs @@ -3783,11 +3783,11 @@ let EliminateInitializationGraphs hash // The output of the analysis - let outOfOrder = ref false - let runtimeChecks = ref false - let directRecursiveData = ref false - let reportedEager = ref false - let definiteDependencies = ref [] + let mutable outOfOrder = false + let mutable runtimeChecks = false + let mutable directRecursiveData = false + let mutable reportedEager = false + let mutable definiteDependencies = [] let rec stripChooseAndExpr e = match stripExpr e with @@ -3883,21 +3883,21 @@ let EliminateInitializationGraphs | MaybeLazy -> if recursiveVals.TryFind v.Deref |> Option.isSome then warning (RecursiveUseCheckedAtRuntime (denv, v, m)) - if not !reportedEager then - (warning (LetRecCheckedAtRuntime m); reportedEager := true) - runtimeChecks := true + if not reportedEager then + (warning (LetRecCheckedAtRuntime m); reportedEager <- true) + runtimeChecks <- true | Top | DefinitelyStrict -> if recursiveVals.TryFind v.Deref |> Option.isSome then if availIfInOrder.TryFind v.Deref |> Option.isNone then warning (LetRecEvaluatedOutOfOrder (denv, boundv, v, m)) - outOfOrder := true - if not !reportedEager then - (warning (LetRecCheckedAtRuntime m); reportedEager := true) - definiteDependencies := (boundv, v) :: !definiteDependencies + outOfOrder <- true + if not reportedEager then + (warning (LetRecCheckedAtRuntime m); reportedEager <- true) + definiteDependencies <- (boundv, v) :: definiteDependencies | InnerTop -> if recursiveVals.TryFind v.Deref |> Option.isSome then - directRecursiveData := true + directRecursiveData <- true | DefinitelyLazy -> () and checkDelayed st b = match st with @@ -3919,11 +3919,11 @@ let EliminateInitializationGraphs // ddg = definiteDependencyGraph let ddgNodes = recursiveVals.Values |> Seq.toList |> List.map mkLocalValRef - let ddg = Graph((fun v -> v.Stamp), ddgNodes, !definiteDependencies ) + let ddg = Graph((fun v -> v.Stamp), ddgNodes, definiteDependencies ) ddg.IterateCycles (fun path -> error (LetRecUnsound (denv, path, path.Head.Range))) - let requiresLazyBindings = !runtimeChecks || !outOfOrder - if !directRecursiveData && requiresLazyBindings then + let requiresLazyBindings = runtimeChecks || outOfOrder + if directRecursiveData && requiresLazyBindings then error(Error(FSComp.SR.tcInvalidMixtureOfRecursiveForms(), bindsm)) if requiresLazyBindings then @@ -5176,9 +5176,9 @@ and ValidateOptArgOrder (spats: SynSimplePats) = let pats, m = getPats spats - let hitOptArg = ref false + let mutable hitOptArg = false - List.iter (fun pat -> if isOptArg pat then hitOptArg := true elif !hitOptArg then error(Error(FSComp.SR.tcOptionalArgsMustComeAfterNonOptionalArgs(), m))) pats + List.iter (fun pat -> if isOptArg pat then hitOptArg <- true elif hitOptArg then error(Error(FSComp.SR.tcOptionalArgsMustComeAfterNonOptionalArgs(), m))) pats /// Bind the patterns used in argument position for a function, method or lambda. 
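The hunks above replace function-local ref cells (created with 'ref', read with '!', written with ':=') by 'let mutable' bindings that are read directly and written with '<-', dropping one heap allocation per cell. The sketch below shows the two styles side by side on a hypothetical accumulator; it illustrates the pattern and is not code from this change. One caveat: F# rejects 'let mutable' locals that are captured by lambdas or nested functions (error FS0407), so ref cells or mutable fields remain necessary in those spots.

// Illustration only, hypothetical function names: the same accumulator
// written first with a ref cell, then with a 'let mutable' binding.
let countPositivesWithRef (xs: int list) =
    let count = ref 0                      // heap-allocated cell
    for x in xs do
        if x > 0 then count := !count + 1  // write with :=, read with !
    !count

let countPositivesWithMutable (xs: int list) =
    let mutable count = 0                  // plain local, no extra allocation
    for x in xs do
        if x > 0 then count <- count + 1   // write with <-, read directly
    count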
@@ -5948,10 +5948,10 @@ and TcExprUndelayed cenv overallTy env tpenv (synExpr: SynExpr) = // Always allow subsumption if a nominal type is known prior to type checking any arguments let flex = not (isTyparTy cenv.g argty) - let first = ref true + let mutable first = true let getInitEnv m = - if !first then - first := false + if first then + first <- false env else { env with eContextInfo = ContextInfo.CollectionElement (isArray, m) } @@ -10524,8 +10524,8 @@ and TcMatchPattern cenv inputTy env tpenv (pat: SynPat, optWhenExpr) = patf' (TcPatPhase2Input (values, true)), optWhenExpr', NameMap.range vspecMap, envinner, tpenv and TcMatchClauses cenv inputTy resultTy env tpenv clauses = - let first = ref true - let isFirst() = if !first then first := false; true else false + let mutable first = true + let isFirst() = if first then first <- false; true else false List.mapFold (fun clause -> TcMatchClause cenv inputTy resultTy env (isFirst()) clause) tpenv clauses and TcMatchClause cenv inputTy resultTy env isFirst tpenv (Clause(pat, optWhenExpr, e, patm, spTgt)) = @@ -14456,7 +14456,7 @@ module TyconConstraintInference = // Repeatedly eliminate structural type definitions whose structural component types no longer support // comparison. On the way record type variables which are support the comparison relation. let rec loop (assumedTycons: Set) (assumedTypars: Set) = - let assumedTyparsAcc = ref assumedTypars + let mutable assumedTyparsAcc = assumedTypars // Checks if a field type supports the 'comparison' constraint based on the assumptions about the type constructors // and type parameters. @@ -14472,7 +14472,7 @@ module TyconConstraintInference = // Within structural types, type parameters can be optimistically assumed to have comparison // We record the ones for which we have made this assumption. elif tycon.TyparsNoRange |> List.exists (fun tp2 -> typarRefEq tp tp2) then - assumedTyparsAcc := (!assumedTyparsAcc).Add(tp.Stamp) + assumedTyparsAcc <- assumedTyparsAcc.Add(tp.Stamp) true else @@ -14552,10 +14552,10 @@ module TyconConstraintInference = res) - if newSet = assumedTycons && assumedTypars = !assumedTyparsAcc then - newSet, !assumedTyparsAcc + if newSet = assumedTycons && assumedTypars = assumedTyparsAcc then + newSet, assumedTyparsAcc else - loop newSet !assumedTyparsAcc + loop newSet assumedTyparsAcc let uneliminatedTycons, assumedTyparsActual = loop initialAssumedTycons initialAssumedTypars @@ -14587,7 +14587,7 @@ module TyconConstraintInference = // Repeatedly eliminate structural type definitions whose structural component types no longer support // equality. On the way add type variables which are support the equality relation let rec loop (assumedTycons: Set) (assumedTypars: Set) = - let assumedTyparsAcc = ref assumedTypars + let mutable assumedTyparsAcc = assumedTypars // Checks if a field type supports the 'equality' constraint based on the assumptions about the type constructors // and type parameters. @@ -14601,7 +14601,7 @@ module TyconConstraintInference = // Within structural types, type parameters can be optimistically assumed to have equality // We record the ones for which we have made this assumption. 
elif tycon.Typars(tycon.Range) |> List.exists (fun tp2 -> typarRefEq tp tp2) then - assumedTyparsAcc := (!assumedTyparsAcc).Add(tp.Stamp) + assumedTyparsAcc <- assumedTyparsAcc.Add(tp.Stamp) true else false @@ -14681,10 +14681,10 @@ module TyconConstraintInference = res) - if newSet = assumedTycons && assumedTypars = !assumedTyparsAcc then - newSet, !assumedTyparsAcc + if newSet = assumedTycons && assumedTypars = assumedTyparsAcc then + newSet, assumedTyparsAcc else - loop newSet !assumedTyparsAcc + loop newSet assumedTyparsAcc let uneliminatedTycons, assumedTyparsActual = loop initialAssumedTycons initialAssumedTypars @@ -17157,7 +17157,7 @@ let rec TcModuleOrNamespaceElementNonMutRec (cenv: cenv) parent typeNames scopem | SynModuleDecl.NamespaceFragment(SynModuleOrNamespace(longId, isRec, kind, defs, xml, attribs, vis, m)) -> - if !progress then dprintn ("Typecheck implementation " + textOfLid longId) + if progress then dprintn ("Typecheck implementation " + textOfLid longId) let endm = m.EndRange do for id in longId do diff --git a/src/fsharp/ast.fs b/src/fsharp/ast.fs index f5a8382505a..fa01b51673f 100644 --- a/src/fsharp/ast.fs +++ b/src/fsharp/ast.fs @@ -2229,7 +2229,7 @@ let noInferredTypars = SynValTyparDecls([], false, []) type LexerIfdefStackEntry = IfDefIf | IfDefElse type LexerIfdefStackEntries = (LexerIfdefStackEntry * range) list -type LexerIfdefStack = LexerIfdefStackEntries ref +type LexerIfdefStack = LexerIfdefStackEntries /// Specifies how the 'endline' function in the lexer should continue after /// it reaches end of line or eof. The options are to continue with 'token' function diff --git a/src/fsharp/fsc.fs b/src/fsharp/fsc.fs index ba9e1bcd8d2..17c8a219314 100644 --- a/src/fsharp/fsc.fs +++ b/src/fsharp/fsc.fs @@ -209,13 +209,13 @@ let AdjustForScriptCompile(ctok, tcConfigB: TcConfigBuilder, commandLineSourceFi commandLineSourceFiles |> List.map combineFilePath - let allSources = ref [] + let mutable allSources = [] let tcConfig = TcConfig.Create(tcConfigB, validate=false) let AddIfNotPresent(filename: string) = - if not(!allSources |> List.contains filename) then - allSources := filename :: !allSources + if not(allSources |> List.contains filename) then + allSources <- filename :: allSources let AppendClosureInformation filename = if IsScript filename then @@ -240,16 +240,16 @@ let AdjustForScriptCompile(ctok, tcConfigB: TcConfigBuilder, commandLineSourceFi // Find closure of .fsx files. commandLineSourceFiles |> List.iter AppendClosureInformation - List.rev !allSources + List.rev allSources let ProcessCommandLineFlags (tcConfigB: TcConfigBuilder, setProcessThreadLocals, lcidFromCodePage, argv) = - let inputFilesRef = ref ([] : string list) + let mutable inputFilesRef = [] let collect name = let lower = String.lowercase name if List.exists (Filename.checkSuffix lower) [".resx"] then error(Error(FSComp.SR.fscResxSourceFileDeprecated name, rangeStartup)) else - inputFilesRef := name :: !inputFilesRef + inputFilesRef <- name :: inputFilesRef let abbrevArgs = GetAbbrevFlagSet tcConfigB true // This is where flags are interpreted by the command line fsc.exe. 
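The allSources and inputFilesRef hunks above apply the same conversion to list accumulators: items are consed onto a mutable binding in reverse and the list is reversed once at the end. A minimal sketch of that shape follows, using a hypothetical collectSourceFiles helper that is not part of this change. When the accumulation must happen inside a nested function or lambda, a ref cell or a ResizeArray is still the usual choice, because mutable locals cannot be captured by closures.

// Illustration only (hypothetical helper): gather file names in reverse with
// a mutable binding, then restore the original order with one List.rev.
let collectSourceFiles (candidates: string list) =
    let mutable acc = []
    for file in candidates do
        // skip duplicates, in the spirit of AddIfNotPresent above
        if not (acc |> List.contains file) then
            acc <- file :: acc
    List.rev acc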
@@ -268,7 +268,7 @@ let ProcessCommandLineFlags (tcConfigB: TcConfigBuilder, setProcessThreadLocals, if tcConfigB.pathMap <> PathMap.empty then error(Error(FSComp.SR.fscPathMapDebugRequiresPortablePdb(), rangeCmdArgs)) - let inputFiles = List.rev !inputFilesRef + let inputFiles = List.rev inputFilesRef // Check if we have a codepage from the console match tcConfigB.lcid with @@ -381,11 +381,11 @@ module XmlDocWriter = if not (Filename.hasSuffixCaseInsensitive "xml" xmlfile ) then error(Error(FSComp.SR.docfileNoXmlSuffix(), Range.rangeStartup)) (* the xmlDocSigOf* functions encode type into string to be used in "id" *) - let members = ref [] + let mutable members = [] let addMember id xmlDoc = if hasDoc xmlDoc then let doc = getDoc xmlDoc - members := (id, doc) :: !members + members <- (id, doc) :: members let doVal (v: Val) = addMember v.XmlDocSig v.XmlDoc let doUnionCase (uc: UnionCase) = addMember uc.XmlDocSig uc.XmlDoc let doField (rf: RecdField) = addMember rf.XmlDocSig rf.XmlDoc @@ -422,7 +422,7 @@ module XmlDocWriter = fprintfn os ("") fprintfn os ("%s") assemblyName fprintfn os ("") - !members |> List.iter (fun (id, doc) -> + members |> List.iter (fun (id, doc) -> fprintfn os "" id fprintfn os "%s" doc fprintfn os "") @@ -1298,10 +1298,10 @@ module StaticLinker = let assumedIndependentSet = set [ "mscorlib"; "System"; "System.Core"; "System.Xml"; "Microsoft.Build.Framework"; "Microsoft.Build.Utilities" ] begin - let remaining = ref (computeILRefs ilxMainModule).AssemblyReferences - while not (isNil !remaining) do - let ilAssemRef = List.head !remaining - remaining := List.tail !remaining + let mutable remaining = (computeILRefs ilxMainModule).AssemblyReferences + while not (isNil remaining) do + let ilAssemRef = List.head remaining + remaining <- List.tail remaining if assumedIndependentSet.Contains ilAssemRef.Name || (ilAssemRef.PublicKey = Some ecmaPublicKey) then depModuleTable.[ilAssemRef.Name] <- dummyEntry ilAssemRef.Name else @@ -1358,7 +1358,7 @@ module StaticLinker = visited = false } // Push the new work items - remaining := refs.AssemblyReferences @ !remaining + remaining <- refs.AssemblyReferences @ remaining | None -> warning(Error(FSComp.SR.fscAssumeStaticLinkContainsNoDependencies(ilAssemRef.Name), rangeStartup)) @@ -1384,14 +1384,14 @@ module StaticLinker = | None -> error(Error(FSComp.SR.fscAssemblyNotFoundInDependencySet n, rangeStartup)) ] - let remaining = ref roots - [ while not (isNil !remaining) do - let n = List.head !remaining - remaining := List.tail !remaining + let mutable remaining = roots + [ while not (isNil remaining) do + let n = List.head remaining + remaining <- List.tail remaining if not n.visited then if verbose then dprintn ("Module "+n.name+" depends on "+GetFSharpCoreLibraryName()) n.visited <- true - remaining := n.edges @ !remaining + remaining <- n.edges @ remaining yield (n.ccu, n.data) ] // Add all provider-generated assemblies into the static linking set diff --git a/src/fsharp/fsi/console.fs b/src/fsharp/fsi/console.fs index 04147be7696..0ffab65b355 100644 --- a/src/fsharp/fsi/console.fs +++ b/src/fsharp/fsi/console.fs @@ -215,13 +215,13 @@ type internal ReadLineConsole() = checkLeftEdge true /// Cursor anchor - position of !anchor when the routine was called - let anchor = ref (Anchor.Current x.Inset) + let mutable anchor = Anchor.Current x.Inset /// Length of the output currently rendered on screen. - let rendered = ref 0 + let mutable rendered = 0 /// Input has changed, therefore options cache is invalidated. 
- let changed = ref false + let mutable changed = false /// Cache of optionsCache - let optionsCache = ref (new Options()) + let mutable optionsCache = Options() let writeBlank() = Console.Write(' ') @@ -229,26 +229,26 @@ type internal ReadLineConsole() = let writeChar(c) = if Console.CursorTop = Console.BufferHeight - 1 && Console.CursorLeft = Console.BufferWidth - 1 then //printf "bottom right!\n" - anchor := { !anchor with top = (!anchor).top - 1 } + anchor <- { anchor with top = (anchor).top - 1 } checkLeftEdge true if (Char.IsControl(c)) then let s = x.MapCharacter(c) Console.Write(s) - rendered := !rendered + s.Length + rendered <- rendered + s.Length else Console.Write(c) - rendered := !rendered + 1 + rendered <- rendered + 1 checkLeftEdge true /// The console input buffer. let input = new StringBuilder() /// Current position - index into the input buffer - let current = ref 0 + let mutable current = 0 let render() = //printf "render\n" - let curr = !current - (!anchor).PlaceAt(x.Inset,0) + let curr = current + anchor.PlaceAt(x.Inset,0) let output = new StringBuilder() let mutable position = -1 for i = 0 to input.Length - 1 do @@ -264,59 +264,59 @@ type internal ReadLineConsole() = position <- output.Length // render the current text, computing a new value for "rendered" - let old_rendered = !rendered - rendered := 0 + let old_rendered = rendered + rendered <- 0 for i = 0 to input.Length - 1 do writeChar(input.Chars(i)) // blank out any dangling old text - for i = !rendered to old_rendered - 1 do + for i = rendered to old_rendered - 1 do writeBlank() - (!anchor).PlaceAt(x.Inset,position) + anchor.PlaceAt(x.Inset,position) render() let insertChar(c:char) = - if (!current = input.Length) then - current := !current + 1 + if (current = input.Length) then + current <- current + 1 input.Append(c) |> ignore writeChar(c) else - input.Insert(!current, c) |> ignore - current := !current + 1 + input.Insert(current, c) |> ignore + current <- current + 1 render() let insertTab() = - for i = ReadLineConsole.TabSize - (!current % ReadLineConsole.TabSize) downto 1 do + for i = ReadLineConsole.TabSize - (current % ReadLineConsole.TabSize) downto 1 do insertChar(' ') let moveLeft() = - if (!current > 0 && (!current - 1 < input.Length)) then - current := !current - 1 - let c = input.Chars(!current) + if (current > 0 && (current - 1 < input.Length)) then + current <- current - 1 + let c = input.Chars(current) Cursor.Move(x.Inset, - x.GetCharacterSize(c)) let moveRight() = - if (!current < input.Length) then - let c = input.Chars(!current) - current := !current + 1 + if (current < input.Length) then + let c = input.Chars(current) + current <- current + 1 Cursor.Move(x.Inset, x.GetCharacterSize(c)) let setInput(line:string) = input.Length <- 0 input.Append(line) |> ignore - current := input.Length + current <- input.Length render() let tabPress(shift) = let opts,prefix = - if !changed then - changed := false + if changed then + changed <- false x.GetOptions(input.ToString()) else - !optionsCache,false - optionsCache := opts + optionsCache,false + optionsCache <- opts if (opts.Count > 0) then let part = @@ -331,13 +331,13 @@ type internal ReadLineConsole() = insertTab() let delete() = - if (input.Length > 0 && !current < input.Length) then - input.Remove(!current, 1) |> ignore + if (input.Length > 0 && current < input.Length) then + input.Remove(current, 1) |> ignore render() let deleteToEndOfLine() = - if (!current < input.Length) then - input.Remove (!current, input.Length - !current) |> ignore + if 
(current < input.Length) then + input.Remove (current, input.Length - current) |> ignore render() let insert(key: ConsoleKeyInfo) = @@ -349,9 +349,9 @@ type internal ReadLineConsole() = insertChar(c) let backspace() = - if (input.Length > 0 && !current > 0) then - input.Remove(!current - 1, 1) |> ignore - current := !current - 1 + if (input.Length > 0 && current > 0) then + input.Remove(current - 1, 1) |> ignore + current <- current - 1 render() let enter() = @@ -394,24 +394,24 @@ type internal ReadLineConsole() = setInput String.Empty change() | ConsoleKey.Home -> - current := 0 - (!anchor).PlaceAt(x.Inset,0) + current <- 0 + anchor.PlaceAt(x.Inset,0) change() | ConsoleKey.End -> - current := input.Length - (!anchor).PlaceAt(x.Inset,!rendered) + current <- input.Length + anchor.PlaceAt(x.Inset,rendered) change() | _ -> match (key.Modifiers, key.KeyChar) with // Control-A | (ConsoleModifiers.Control, '\001') -> - current := 0 - (!anchor).PlaceAt(x.Inset,0) + current <- 0 + anchor.PlaceAt(x.Inset,0) change () // Control-E | (ConsoleModifiers.Control, '\005') -> - current := input.Length - (!anchor).PlaceAt(x.Inset,!rendered) + current <-input.Length + anchor.PlaceAt(x.Inset,rendered) change () // Control-B | (ConsoleModifiers.Control, '\002') -> @@ -452,6 +452,6 @@ type internal ReadLineConsole() = read() and change() = - changed := true + changed <- true read() read() diff --git a/src/fsharp/fsi/fsi.fs b/src/fsharp/fsi/fsi.fs index 60f53b3964f..f4443411602 100644 --- a/src/fsharp/fsi/fsi.fs +++ b/src/fsharp/fsi/fsi.fs @@ -788,7 +788,7 @@ let internal SetCurrentUICultureForThread (lcid : int option) = //---------------------------------------------------------------------------- let internal InstallErrorLoggingOnThisThread errorLogger = - if !progress then dprintfn "Installing logger on id=%d name=%s" Thread.CurrentThread.ManagedThreadId Thread.CurrentThread.Name + if progress then dprintfn "Installing logger on id=%d name=%s" Thread.CurrentThread.ManagedThreadId Thread.CurrentThread.Name SetThreadErrorLoggerNoUnwind(errorLogger) SetThreadBuildPhaseNoUnwind(BuildPhase.Interactive) @@ -798,7 +798,7 @@ let internal SetServerCodePages(fsiOptions: FsiCommandLineOptions) = match fsiOptions.FsiServerInputCodePage, fsiOptions.FsiServerOutputCodePage with | None,None -> () | inputCodePageOpt,outputCodePageOpt -> - let successful = ref false + let mutable successful = false Async.Start (async { do match inputCodePageOpt with | None -> () | Some(n:int) -> @@ -813,9 +813,9 @@ let internal SetServerCodePages(fsiOptions: FsiCommandLineOptions) = // Note this modifies the real honest-to-goodness settings for the current shell. // and the modifications hang around even after the process has exited. Console.OutputEncoding <- encoding - do successful := true }); + do successful <- true }); for pause in [10;50;100;1000;2000;10000] do - if not !successful then + if not successful then Thread.Sleep(pause); #if LOGGING_GUI if not !successful then @@ -871,17 +871,17 @@ type internal FsiConsoleInput(fsi: FsiEvaluationSessionHostConfig, fsiOptions: F match consoleOpt with | Some console when fsiOptions.EnableConsoleKeyProcessing && not fsiOptions.IsInteractiveServer -> if List.isEmpty fsiOptions.SourceFiles then - if !progress then fprintfn outWriter "first-line-reader-thread reading first line..."; + if progress then fprintfn outWriter "first-line-reader-thread reading first line..."; firstLine <- Some(console()); - if !progress then fprintfn outWriter "first-line-reader-thread got first line = %A..." 
firstLine; + if progress then fprintfn outWriter "first-line-reader-thread got first line = %A..." firstLine; consoleReaderStartupDone.Set() |> ignore - if !progress then fprintfn outWriter "first-line-reader-thread has set signal and exited." ; + if progress then fprintfn outWriter "first-line-reader-thread has set signal and exited." ; | _ -> ignore(inReader.Peek()); consoleReaderStartupDone.Set() |> ignore )).Start() else - if !progress then fprintfn outWriter "first-line-reader-thread not in use." + if progress then fprintfn outWriter "first-line-reader-thread not in use." consoleReaderStartupDone.Set() |> ignore /// Try to get the first line, if we snarfed it while probing. @@ -969,7 +969,7 @@ type internal FsiDynamicCompiler /// Add attributes let CreateModuleFragment (tcConfigB: TcConfigBuilder, assemblyName, codegenResults) = - if !progress then fprintfn fsiConsoleOutput.Out "Creating main module..."; + if progress then fprintfn fsiConsoleOutput.Out "Creating main module..."; let mainModule = mkILSimpleModule assemblyName (GetGeneratedILModuleName tcConfigB.target assemblyName) (tcConfigB.target = CompilerTarget.Dll) tcConfigB.subsystemVersion tcConfigB.useHighEntropyVA (mkILTypeDefs codegenResults.ilTypeDefs) None None 0x0 (mkILExportedTypes []) "" { mainModule with Manifest = @@ -1413,7 +1413,7 @@ type internal FsiInterruptController(fsiOptions: FsiCommandLineOptions, fsiConso // Also sleep to give computations a bit of time to terminate Thread.Sleep(pauseMilliseconds) if (killThreadRequest = ThreadAbortRequest) then - if !progress then fsiConsoleOutput.uprintnfn "%s" (FSIstrings.SR.fsiAbortingMainThread()) + if progress then fsiConsoleOutput.uprintnfn "%s" (FSIstrings.SR.fsiAbortingMainThread()) killThreadRequest <- NoRequest threadToKill.Abort() ()),Name="ControlCAbortThread") @@ -1504,7 +1504,7 @@ module internal MagicAssemblyResolution = // Grab the name of the assembly let tcConfig = TcConfig.Create(tcConfigB,validate=false) let simpleAssemName = fullAssemName.Split([| ',' |]).[0] - if !progress then fsiConsoleOutput.uprintfn "ATTEMPT MAGIC LOAD ON ASSEMBLY, simpleAssemName = %s" simpleAssemName // "Attempting to load a dynamically required assembly in response to an AssemblyResolve event by using known static assembly references..." + if progress then fsiConsoleOutput.uprintfn "ATTEMPT MAGIC LOAD ON ASSEMBLY, simpleAssemName = %s" simpleAssemName // "Attempting to load a dynamically required assembly in response to an AssemblyResolve event by using known static assembly references..." // Special case: Mono Windows Forms attempts to load an assembly called something like "Windows.Forms.resources" // We can't resolve this, so don't try. 
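The '!progress' reads above become plain reads because progress is presumably now a module-level 'let mutable' flag rather than a bool ref (its assignment via the FSHARP_INTERACTIVE_PROGRESS switch appears further down in FsiEvaluationSession.Run). Module-level mutables compile to static fields, so unlike mutable locals they can be read and set from lambdas and from other threads without the closure-capture restriction. The sketch below is a hypothetical illustration of that shape; the Diagnostics module and trace helper are not part of this change.

// Illustration only: a module-level mutable flag gating diagnostic output.
module Diagnostics =
    // Compiles to a static field, so it can be read from lambdas and other
    // threads without the FS0407 restriction on captured mutable locals.
    let mutable progress = false

    let trace (message: string) =
        if progress then eprintfn "[fsi] %s" message

// Example usage, mirroring fsi's FSHARP_INTERACTIVE_PROGRESS environment switch:
Diagnostics.progress <-
    not (isNull (System.Environment.GetEnvironmentVariable "FSHARP_INTERACTIVE_PROGRESS"))
Diagnostics.trace "stdin reader thread started"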
@@ -1545,12 +1545,12 @@ module internal MagicAssemblyResolution = | OkResult (warns, [r]) -> OkResult (warns, Choice1Of2 r.resolvedPath) | _ -> - if !progress then fsiConsoleOutput.uprintfn "ATTEMPT LOAD, assemblyReferenceTextDll = %s" assemblyReferenceTextDll + if progress then fsiConsoleOutput.uprintfn "ATTEMPT LOAD, assemblyReferenceTextDll = %s" assemblyReferenceTextDll /// Take a look through the files quoted, perhaps with explicit paths let searchResult = (tcConfig.referencedDLLs |> List.tryPick (fun assemblyReference -> - if !progress then fsiConsoleOutput.uprintfn "ATTEMPT MAGIC LOAD ON FILE, referencedDLL = %s" assemblyReference.Text + if progress then fsiConsoleOutput.uprintfn "ATTEMPT MAGIC LOAD ON FILE, referencedDLL = %s" assemblyReference.Text if System.String.Compare(Filename.fileNameOfPath assemblyReference.Text, assemblyReferenceTextDll,StringComparison.OrdinalIgnoreCase) = 0 || System.String.Compare(Filename.fileNameOfPath assemblyReference.Text, assemblyReferenceTextExe,StringComparison.OrdinalIgnoreCase) = 0 then Some(tcImports.TryResolveAssemblyReference (ctok, assemblyReference, ResolveAssemblyReferenceMode.Speculative)) @@ -1628,7 +1628,7 @@ type internal FsiStdinLexerProvider inputOption |> Option.iter (fun t -> fsiStdinSyphon.Add (t + "\n")) match inputOption with | Some(null) | None -> - if !progress then fprintfn fsiConsoleOutput.Out "End of file from TextReader.ReadLine" + if progress then fprintfn fsiConsoleOutput.Out "End of file from TextReader.ReadLine" 0 | Some (input:string) -> let input = input + "\n" @@ -1655,7 +1655,7 @@ type internal FsiStdinLexerProvider Lexhelp.resetLexbufPos sourceFileName lexbuf let skip = true // don't report whitespace from lexer let defines = "INTERACTIVE"::tcConfigB.conditionalCompilationDefines - let lexargs = mkLexargs (sourceFileName,defines, interactiveInputLightSyntaxStatus, lexResourceManager, ref [], errorLogger, PathMap.empty) + let lexargs = mkLexargs (sourceFileName,defines, interactiveInputLightSyntaxStatus, lexResourceManager, [], errorLogger, PathMap.empty) let tokenizer = LexFilter.LexFilter(interactiveInputLightSyntaxStatus, tcConfigB.compilingFslib, Lexer.token lexargs skip, lexbuf) tokenizer @@ -1756,15 +1756,15 @@ type internal FsiInteractionProcessor /// Parse one interaction. Called on the parser thread. let ParseInteraction (tokenizer:LexFilter.LexFilter) = - let lastToken = ref Parser.ELSE // Any token besides SEMICOLON_SEMICOLON will do for initial value + let mutable lastToken = Parser.ELSE // Any token besides SEMICOLON_SEMICOLON will do for initial value try - if !progress then fprintfn fsiConsoleOutput.Out "In ParseInteraction..." + if progress then fprintfn fsiConsoleOutput.Out "In ParseInteraction..." let input = Lexhelp.reusingLexbufForParsing tokenizer.LexBuffer (fun () -> let lexerWhichSavesLastToken lexbuf = let tok = tokenizer.Lexer lexbuf - lastToken := tok + lastToken <- tok tok Parser.interaction lexerWhichSavesLastToken tokenizer.LexBuffer) Some input @@ -1772,7 +1772,7 @@ type internal FsiInteractionProcessor // On error, consume tokens until to ;; or EOF. // Caveat: Unless the error parse ended on ;; - so check the lastToken returned by the lexer function. // Caveat: What if this was a look-ahead? That's fine! Since we need to skip to the ;; anyway. 
- if (match !lastToken with Parser.SEMICOLON_SEMICOLON -> false | _ -> true) then + if (match lastToken with Parser.SEMICOLON_SEMICOLON -> false | _ -> true) then let mutable tok = Parser.ELSE (* <-- any token <> SEMICOLON_SEMICOLON will do *) while (match tok with Parser.SEMICOLON_SEMICOLON -> false | _ -> true) && not tokenizer.LexBuffer.IsPastEndOfStream do @@ -1942,7 +1942,7 @@ type internal FsiInteractionProcessor let mainThreadProcessAction ctok action istate = try let tcConfig = TcConfig.Create(tcConfigB,validate=false) - if !progress then fprintfn fsiConsoleOutput.Out "In mainThreadProcessAction..."; + if progress then fprintfn fsiConsoleOutput.Out "In mainThreadProcessAction..."; fsiInterruptController.InterruptAllowed <- InterruptCanRaiseException; let res = action ctok tcConfig istate fsiInterruptController.ClearInterruptRequest() @@ -2010,19 +2010,19 @@ type internal FsiInteractionProcessor fsiConsolePrompt.Print(); istate |> InteractiveCatch errorLogger (fun istate -> - if !progress then fprintfn fsiConsoleOutput.Out "entering ParseInteraction..."; + if progress then fprintfn fsiConsoleOutput.Out "entering ParseInteraction..."; // Parse the interaction. When FSI.EXE is waiting for input from the console the // parser thread is blocked somewhere deep this call. let action = ParseInteraction tokenizer - if !progress then fprintfn fsiConsoleOutput.Out "returned from ParseInteraction...calling runCodeOnMainThread..."; + if progress then fprintfn fsiConsoleOutput.Out "returned from ParseInteraction...calling runCodeOnMainThread..."; // After we've unblocked and got something to run we switch // over to the run-thread (e.g. the GUI thread) let res = istate |> runCodeOnMainThread (fun ctok istate -> mainThreadProcessParsedInteractions ctok errorLogger (action, istate) cancellationToken) - if !progress then fprintfn fsiConsoleOutput.Out "Just called runCodeOnMainThread, res = %O..." res; + if progress then fprintfn fsiConsoleOutput.Out "Just called runCodeOnMainThread, res = %O..." 
res; res) member __.CurrentState = currState @@ -2137,7 +2137,7 @@ type internal FsiInteractionProcessor // member processor.StartStdinReadAndProcessThread (errorLogger) = - if !progress then fprintfn fsiConsoleOutput.Out "creating stdinReaderThread"; + if progress then fprintfn fsiConsoleOutput.Out "creating stdinReaderThread"; let stdinReaderThread = new Thread(new ThreadStart(fun () -> @@ -2146,12 +2146,12 @@ type internal FsiInteractionProcessor try try let initialTokenizer = fsiStdinLexerProvider.CreateStdinLexer(errorLogger) - if !progress then fprintfn fsiConsoleOutput.Out "READER: stdin thread started..."; + if progress then fprintfn fsiConsoleOutput.Out "READER: stdin thread started..."; // Delay until we've peeked the input or read the entire first line fsiStdinLexerProvider.ConsoleInput.WaitForInitialConsoleInput() - if !progress then fprintfn fsiConsoleOutput.Out "READER: stdin thread got first line..."; + if progress then fprintfn fsiConsoleOutput.Out "READER: stdin thread got first line..."; let runCodeOnMainThread = runCodeOnEventLoop errorLogger @@ -2172,12 +2172,12 @@ type internal FsiInteractionProcessor loop initialTokenizer - if !progress then fprintfn fsiConsoleOutput.Out "- READER: Exiting stdinReaderThread"; + if progress then fprintfn fsiConsoleOutput.Out "- READER: Exiting stdinReaderThread"; with e -> stopProcessingRecovery e range0; finally - if !progress then fprintfn fsiConsoleOutput.Out "- READER: Exiting process because of failure/exit on stdinReaderThread"; + if progress then fprintfn fsiConsoleOutput.Out "- READER: Exiting process because of failure/exit on stdinReaderThread"; // REVIEW: On some flavors of Mono, calling exit may freeze the process if we're using the WinForms event handler // Basically, on Mono 2.6.3, the GUI thread may be left dangling on exit. At that point: // -- System.Environment.Exit will cause the process to stop responding @@ -2200,7 +2200,7 @@ type internal FsiInteractionProcessor ),Name="StdinReaderThread") - if !progress then fprintfn fsiConsoleOutput.Out "MAIN: starting stdin thread..." + if progress then fprintfn fsiConsoleOutput.Out "MAIN: starting stdin thread..." stdinReaderThread.Start() member __.CompletionsForPartialLID (istate, prefix:string) = @@ -2256,11 +2256,11 @@ let internal SpawnInteractiveServer /// This gives us a last chance to catch an abort on the main execution thread. let internal DriveFsiEventLoop (fsi: FsiEvaluationSessionHostConfig, fsiConsoleOutput: FsiConsoleOutput) = let rec runLoop() = - if !progress then fprintfn fsiConsoleOutput.Out "GUI thread runLoop"; + if progress then fprintfn fsiConsoleOutput.Out "GUI thread runLoop"; let restart = try // BLOCKING POINT: The GUI Thread spends most (all) of its time this event loop - if !progress then fprintfn fsiConsoleOutput.Out "MAIN: entering event loop..."; + if progress then fprintfn fsiConsoleOutput.Out "MAIN: entering event loop..."; fsi.EventLoopRun() with | :? 
ThreadAbortException -> @@ -2274,7 +2274,7 @@ let internal DriveFsiEventLoop (fsi: FsiEvaluationSessionHostConfig, fsiConsoleO stopProcessingRecovery e range0; true // Try again, just case we can restart - if !progress then fprintfn fsiConsoleOutput.Out "MAIN: exited event loop..."; + if progress then fprintfn fsiConsoleOutput.Out "MAIN: exited event loop..."; if restart then runLoop() runLoop(); @@ -2656,7 +2656,7 @@ type FsiEvaluationSession (fsi: FsiEvaluationSessionHostConfig, argv:string[], i [] member x.Run() = - progress := condition "FSHARP_INTERACTIVE_PROGRESS" + progress <- condition "FSHARP_INTERACTIVE_PROGRESS" // Explanation: When Run is called we do a bunch of processing. For fsi.exe // and fsiAnyCpu.exe there are no other active threads at this point, so we can assume this is the @@ -2674,7 +2674,7 @@ type FsiEvaluationSession (fsi: FsiEvaluationSessionHostConfig, argv:string[], i if fsiOptions.Interact then // page in the type check env fsiInteractionProcessor.LoadDummyInteraction(ctokStartup, errorLogger) - if !progress then fprintfn fsiConsoleOutput.Out "MAIN: InstallKillThread!"; + if progress then fprintfn fsiConsoleOutput.Out "MAIN: InstallKillThread!"; // Compute how long to pause before a ThreadAbort is actually executed. // A somewhat arbitrary choice. @@ -2682,7 +2682,7 @@ type FsiEvaluationSession (fsi: FsiEvaluationSessionHostConfig, argv:string[], i // Request that ThreadAbort interrupts be performed on this (current) thread fsiInterruptController.InstallKillThread(Thread.CurrentThread, pauseMilliseconds) - if !progress then fprintfn fsiConsoleOutput.Out "MAIN: got initial state, creating form"; + if progress then fprintfn fsiConsoleOutput.Out "MAIN: got initial state, creating form"; #if !FX_NO_APP_DOMAINS // Route background exceptions to the exception handlers @@ -2699,10 +2699,10 @@ type FsiEvaluationSession (fsi: FsiEvaluationSessionHostConfig, argv:string[], i DriveFsiEventLoop (fsi, fsiConsoleOutput ) else // not interact - if !progress then fprintfn fsiConsoleOutput.Out "Run: not interact, loading initial files..." + if progress then fprintfn fsiConsoleOutput.Out "Run: not interact, loading initial files..." fsiInteractionProcessor.LoadInitialFiles(ctokRun, errorLogger) - if !progress then fprintfn fsiConsoleOutput.Out "Run: done..." + if progress then fprintfn fsiConsoleOutput.Out "Run: done..." exit (min errorLogger.ErrorCount 1) // The Ctrl-C exception handler that we've passed to native code has diff --git a/src/fsharp/fsi/fsimain.fs b/src/fsharp/fsi/fsimain.fs index 653485ac18b..3fdf044f782 100644 --- a/src/fsharp/fsi/fsimain.fs +++ b/src/fsharp/fsi/fsimain.fs @@ -65,13 +65,13 @@ type WinFormsEventLoop() = do mainForm.DoCreateHandle() let mutable lcid = None // Set the default thread exception handler - let restart = ref false + let mutable restart = false member __.LCID with get () = lcid and set v = lcid <- v interface IEventLoop with member x.Run() = - restart := false + restart <- false Application.Run() - !restart + restart member x.Invoke (f: unit -> 'T) : 'T = if not mainForm.InvokeRequired then f() @@ -79,7 +79,7 @@ type WinFormsEventLoop() = // Workaround: Mono's Control.Invoke returns a null result. Hence avoid the problem by // transferring the resulting state using a mutable location. - let mainFormInvokeResultHolder = ref None + let mutable mainFormInvokeResultHolder = None // Actually, Mono's Control.Invoke isn't even blocking (or wasn't on 1.1.15)! So use a signal to indicate completion. 
// Indeed, we should probably do this anyway with a timeout so we can report progress from @@ -94,7 +94,7 @@ type WinFormsEventLoop() = // When we get called back, someone may jack our culture // So we must reset our UI culture every time use _scope = SetCurrentUICultureForThread lcid - mainFormInvokeResultHolder := Some(f ()) + mainFormInvokeResultHolder <- Some(f ()) finally doneSignal.Set() |> ignore)) |> ignore @@ -103,9 +103,9 @@ type WinFormsEventLoop() = () // if !progress then fprintf outWriter "." outWriter.Flush() //if !progress then fprintfn outWriter "RunCodeOnWinFormsMainThread: Got completion signal, res = %b" (Option.isSome !mainFormInvokeResultHolder) - !mainFormInvokeResultHolder |> Option.get + mainFormInvokeResultHolder |> Option.get - member x.ScheduleRestart() = restart := true; Application.Exit() + member x.ScheduleRestart() = restart <- true; Application.Exit() /// Try to set the unhandled exception mode of System.Windows.Forms let internal TrySetUnhandledExceptionMode() = diff --git a/src/fsharp/lex.fsl b/src/fsharp/lex.fsl index a3a0003f57e..397e1487694 100644 --- a/src/fsharp/lex.fsl +++ b/src/fsharp/lex.fsl @@ -412,67 +412,67 @@ rule token args skip = parse | SingleChar(c) -> CHAR (char c) | _ -> fail args lexbuf (FSComp.SR.lexThisUnicodeOnlyInStringLiterals()) (CHAR (char 0)) } | "(*IF-FSHARP" - { if not skip then (COMMENT (LexCont.Token !args.ifdefStack)) else token args skip lexbuf } + { if not skip then (COMMENT (LexCont.Token args.ifdefStack)) else token args skip lexbuf } | "(*F#" - { if not skip then (COMMENT (LexCont.Token !args.ifdefStack)) else token args skip lexbuf } + { if not skip then (COMMENT (LexCont.Token args.ifdefStack)) else token args skip lexbuf } | "ENDIF-FSHARP*)" - { if not skip then (COMMENT (LexCont.Token !args.ifdefStack)) else token args skip lexbuf } + { if not skip then (COMMENT (LexCont.Token args.ifdefStack)) else token args skip lexbuf } | "F#*)" - { if not skip then (COMMENT (LexCont.Token !args.ifdefStack)) else token args skip lexbuf } + { if not skip then (COMMENT (LexCont.Token args.ifdefStack)) else token args skip lexbuf } | "(*)" { LPAREN_STAR_RPAREN } | "(*" { let m = lexbuf.LexemeRange - if not skip then (COMMENT (LexCont.Comment(!args.ifdefStack,1,m))) else comment (1,m,args) skip lexbuf } + if not skip then (COMMENT (LexCont.Comment(args.ifdefStack,1,m))) else comment (1,m,args) skip lexbuf } | "(*IF-CAML*)" | "(*IF-OCAML*)" { let m = lexbuf.LexemeRange - if not skip then (COMMENT (LexCont.MLOnly(!args.ifdefStack,m))) else mlOnly m args skip lexbuf } + if not skip then (COMMENT (LexCont.MLOnly(args.ifdefStack,m))) else mlOnly m args skip lexbuf } | '"' { let buf,fin,m = startString args lexbuf - if not skip then (STRING_TEXT (LexCont.String(!args.ifdefStack,m))) else string (buf,fin,m,args) skip lexbuf } + if not skip then (STRING_TEXT (LexCont.String(args.ifdefStack,m))) else string (buf,fin,m,args) skip lexbuf } | '"' '"' '"' { let buf,fin,m = startString args lexbuf - if not skip then (STRING_TEXT (LexCont.TripleQuoteString(!args.ifdefStack,m))) else tripleQuoteString (buf,fin,m,args) skip lexbuf } + if not skip then (STRING_TEXT (LexCont.TripleQuoteString(args.ifdefStack,m))) else tripleQuoteString (buf,fin,m,args) skip lexbuf } | '$' '"' - { fail args lexbuf (FSComp.SR.lexTokenReserved()) (WHITESPACE (LexCont.Token !args.ifdefStack)) } + { fail args lexbuf (FSComp.SR.lexTokenReserved()) (WHITESPACE (LexCont.Token args.ifdefStack)) } | '@' '"' { let buf,fin,m = startString args lexbuf - if not skip then 
(STRING_TEXT (LexCont.VerbatimString(!args.ifdefStack,m))) else verbatimString (buf,fin,m,args) skip lexbuf } + if not skip then (STRING_TEXT (LexCont.VerbatimString(args.ifdefStack,m))) else verbatimString (buf,fin,m,args) skip lexbuf } | truewhite+ { if skip then token args skip lexbuf - else WHITESPACE (LexCont.Token !args.ifdefStack) } + else WHITESPACE (LexCont.Token args.ifdefStack) } | offwhite+ { if args.lightSyntaxStatus.Status then errorR(Error(FSComp.SR.lexTabsNotAllowed(),lexbuf.LexemeRange)) - if not skip then (WHITESPACE (LexCont.Token !args.ifdefStack)) else token args skip lexbuf } + if not skip then (WHITESPACE (LexCont.Token args.ifdefStack)) else token args skip lexbuf } | "////" op_char* { // 4+ slash are 1-line comments, online 3 slash are XmlDoc let m = lexbuf.LexemeRange - if not skip then (LINE_COMMENT (LexCont.SingleLineComment(!args.ifdefStack,1,m))) else singleLineComment (None,1,m,args) skip lexbuf } + if not skip then (LINE_COMMENT (LexCont.SingleLineComment(args.ifdefStack,1,m))) else singleLineComment (None,1,m,args) skip lexbuf } | "///" op_char* { // Match exactly 3 slash, 4+ slash caught by preceding rule let m = lexbuf.LexemeRange let doc = lexemeTrimLeft lexbuf 3 let sb = (new StringBuilder(100)).Append(doc) - if not skip then (LINE_COMMENT (LexCont.SingleLineComment(!args.ifdefStack,1,m))) else singleLineComment (Some sb,1,m,args) skip lexbuf } + if not skip then (LINE_COMMENT (LexCont.SingleLineComment(args.ifdefStack,1,m))) else singleLineComment (Some sb,1,m,args) skip lexbuf } | "//" op_char* { // Need to read all operator symbols too, otherwise it might be parsed by a rule below let m = lexbuf.LexemeRange - if not skip then (LINE_COMMENT (LexCont.SingleLineComment(!args.ifdefStack,1,m))) else singleLineComment (None,1,m,args) skip lexbuf } + if not skip then (LINE_COMMENT (LexCont.SingleLineComment(args.ifdefStack,1,m))) else singleLineComment (None,1,m,args) skip lexbuf } | newline - { newline lexbuf; if not skip then (WHITESPACE (LexCont.Token !args.ifdefStack)) else token args skip lexbuf } + { newline lexbuf; if not skip then (WHITESPACE (LexCont.Token args.ifdefStack)) else token args skip lexbuf } | '`' '`' ([^'`' '\n' '\r' '\t'] | '`' [^'`''\n' '\r' '\t']) + '`' '`' { Keywords.IdentifierToken args lexbuf (lexemeTrimBoth lexbuf 2 2) } @@ -524,7 +524,7 @@ rule token args skip = parse else // add a newline when we don't apply a directive since we consumed a newline getting here newline lexbuf - (HASH_LINE (LexCont.Token !args.ifdefStack)) } + (HASH_LINE (LexCont.Token args.ifdefStack)) } | "<@" { checkExprOp lexbuf; LQUOTE ("<@ @>", false) } | "<@@" { checkExprOp lexbuf; LQUOTE ("<@@ @@>", true) } @@ -589,7 +589,7 @@ rule token args skip = parse | "#!" op_char* { // Treat shebangs like regular comments, but they are only allowed at the start of a file let m = lexbuf.LexemeRange - let tok = shouldStartFile args lexbuf m (0,FSComp.SR.lexHashBangMustBeFirstInFile()) (LINE_COMMENT (LexCont.SingleLineComment(!args.ifdefStack,1,m))) + let tok = shouldStartFile args lexbuf m (0,FSComp.SR.lexHashBangMustBeFirstInFile()) (LINE_COMMENT (LexCont.SingleLineComment(args.ifdefStack,1,m))) if not skip then tok else singleLineComment (None,1,m,args) skip lexbuf } | "#light" anywhite* @@ -598,59 +598,59 @@ rule token args skip = parse warning(Error((0,"#light should only occur as the first non-comment text in an F# source file"),lexbuf.LexemeRange)) // TODO unreachable error above, I think? 
- brianmcn args.lightSyntaxStatus.Status <- true - if not skip then (HASH_LIGHT (LexCont.Token !args.ifdefStack)) else token args skip lexbuf } + if not skip then (HASH_LIGHT (LexCont.Token args.ifdefStack)) else token args skip lexbuf } | ("#indent" | "#light") anywhite+ "\"off\"" { args.lightSyntaxStatus.Status <- false mlCompatWarning (FSComp.SR.lexIndentOffForML()) lexbuf.LexemeRange - if not skip then (HASH_LIGHT (LexCont.Token !args.ifdefStack)) else token args skip lexbuf } + if not skip then (HASH_LIGHT (LexCont.Token args.ifdefStack)) else token args skip lexbuf } | anywhite* "#if" anywhite+ anystring { let m = lexbuf.LexemeRange let lookup id = List.contains id args.defines let lexed = lexeme lexbuf let isTrue = evalIfDefExpression lexbuf.StartPos lexbuf.SupportsFeature args lookup lexed - args.ifdefStack := (IfDefIf,m) :: !(args.ifdefStack) + args.ifdefStack <- (IfDefIf,m) :: args.ifdefStack // Get the token; make sure it starts at zero position & return let cont, f = - ( if isTrue then (LexCont.EndLine(LexerEndlineContinuation.Token(!args.ifdefStack)), endline (LexerEndlineContinuation.Token !args.ifdefStack) args skip) - else (LexCont.EndLine(LexerEndlineContinuation.Skip(!args.ifdefStack,0,m)), endline (LexerEndlineContinuation.Skip(!args.ifdefStack,0,m)) args skip) ) + ( if isTrue then (LexCont.EndLine(LexerEndlineContinuation.Token(args.ifdefStack)), endline (LexerEndlineContinuation.Token args.ifdefStack) args skip) + else (LexCont.EndLine(LexerEndlineContinuation.Skip(args.ifdefStack,0,m)), endline (LexerEndlineContinuation.Skip(args.ifdefStack,0,m)) args skip) ) let tok = shouldStartLine args lexbuf m (FSComp.SR.lexHashIfMustBeFirst()) (HASH_IF(m,lexed,cont)) if not skip then tok else f lexbuf } | anywhite* "#else" anywhite* ("//" [^'\n''\r']*)? { let lexed = (lexeme lexbuf) - match !(args.ifdefStack) with + match args.ifdefStack with | [] -> LEX_FAILURE (FSComp.SR.lexHashElseNoMatchingIf()) | (IfDefElse,_) :: _rest -> LEX_FAILURE (FSComp.SR.lexHashEndifRequiredForElse()) | (IfDefIf,_) :: rest -> let m = lexbuf.LexemeRange - args.ifdefStack := (IfDefElse,m) :: rest - let tok = HASH_ELSE(m,lexed, LexCont.EndLine(LexerEndlineContinuation.Skip(!args.ifdefStack,0,m))) + args.ifdefStack <- (IfDefElse,m) :: rest + let tok = HASH_ELSE(m,lexed, LexCont.EndLine(LexerEndlineContinuation.Skip(args.ifdefStack,0,m))) let tok = shouldStartLine args lexbuf m (FSComp.SR.lexHashElseMustBeFirst()) tok - if not skip then tok else endline (LexerEndlineContinuation.Skip(!args.ifdefStack,0,m)) args skip lexbuf } + if not skip then tok else endline (LexerEndlineContinuation.Skip(args.ifdefStack,0,m)) args skip lexbuf } | anywhite* "#endif" anywhite* ("//" [^'\n''\r']*)? 
{ let lexed = (lexeme lexbuf) let m = lexbuf.LexemeRange - match !(args.ifdefStack) with + match args.ifdefStack with | []-> LEX_FAILURE (FSComp.SR.lexHashEndingNoMatchingIf()) | _ :: rest -> - args.ifdefStack := rest - let tok = HASH_ENDIF(m,lexed,LexCont.EndLine(LexerEndlineContinuation.Token(!args.ifdefStack))) + args.ifdefStack <- rest + let tok = HASH_ENDIF(m,lexed,LexCont.EndLine(LexerEndlineContinuation.Token(args.ifdefStack))) let tok = shouldStartLine args lexbuf m (FSComp.SR.lexHashEndifMustBeFirst()) tok - if not skip then tok else endline (LexerEndlineContinuation.Token(!args.ifdefStack)) args skip lexbuf } + if not skip then tok else endline (LexerEndlineContinuation.Token(args.ifdefStack)) args skip lexbuf } | "#if" - { let tok = fail args lexbuf (FSComp.SR.lexHashIfMustHaveIdent()) (WHITESPACE (LexCont.Token !args.ifdefStack)) + { let tok = fail args lexbuf (FSComp.SR.lexHashIfMustHaveIdent()) (WHITESPACE (LexCont.Token args.ifdefStack)) if not skip then tok else token args skip lexbuf } | surrogateChar surrogateChar | _ { unexpectedChar lexbuf } | eof - { EOF (LexCont.Token !args.ifdefStack) } + { EOF (LexCont.Token args.ifdefStack) } // Skips INACTIVE code until if finds #else / #endif matching with the #if or #else @@ -660,10 +660,10 @@ and ifdefSkip n m args skip = parse // If #if is the first thing on the line then increase depth, otherwise skip, because it is invalid (e.g. "(**) #if ...") if (m.StartColumn <> 0) then - if not skip then (INACTIVECODE (LexCont.IfDefSkip(!args.ifdefStack,n,m))) else ifdefSkip n m args skip lexbuf + if not skip then (INACTIVECODE (LexCont.IfDefSkip(args.ifdefStack,n,m))) else ifdefSkip n m args skip lexbuf else - let tok = INACTIVECODE(LexCont.EndLine(LexerEndlineContinuation.Skip(!args.ifdefStack,n+1,m))) - if not skip then tok else endline (LexerEndlineContinuation.Skip(!args.ifdefStack,n+1,m)) args skip lexbuf } + let tok = INACTIVECODE(LexCont.EndLine(LexerEndlineContinuation.Skip(args.ifdefStack,n+1,m))) + if not skip then tok else endline (LexerEndlineContinuation.Skip(args.ifdefStack,n+1,m)) args skip lexbuf } | anywhite* "#else" anywhite* ("//" [^'\n''\r']*)? { let lexed = (lexeme lexbuf) @@ -671,17 +671,17 @@ and ifdefSkip n m args skip = parse // If #else is the first thing on the line then process it, otherwise ignore, because it is invalid (e.g. 
"(**) #else ...") if (m.StartColumn <> 0) then - if not skip then (INACTIVECODE (LexCont.IfDefSkip(!args.ifdefStack,n,m))) else ifdefSkip n m args skip lexbuf + if not skip then (INACTIVECODE (LexCont.IfDefSkip(args.ifdefStack,n,m))) else ifdefSkip n m args skip lexbuf elif n = 0 then - match !(args.ifdefStack) with + match args.ifdefStack with | []-> LEX_FAILURE (FSComp.SR.lexHashElseNoMatchingIf()) | (IfDefElse,_) :: _rest -> LEX_FAILURE (FSComp.SR.lexHashEndifRequiredForElse()) | (IfDefIf,_) :: rest -> let m = lexbuf.LexemeRange - args.ifdefStack := (IfDefElse,m) :: rest - if not skip then (HASH_ELSE(m,lexed,LexCont.EndLine(LexerEndlineContinuation.Token(!args.ifdefStack)))) else endline (LexerEndlineContinuation.Token(!args.ifdefStack)) args skip lexbuf + args.ifdefStack <- (IfDefElse,m) :: rest + if not skip then (HASH_ELSE(m,lexed,LexCont.EndLine(LexerEndlineContinuation.Token(args.ifdefStack)))) else endline (LexerEndlineContinuation.Token(args.ifdefStack)) args skip lexbuf else - if not skip then (INACTIVECODE(LexCont.EndLine(LexerEndlineContinuation.Skip(!args.ifdefStack,n,m)))) else endline (LexerEndlineContinuation.Skip(!args.ifdefStack,n,m)) args skip lexbuf } + if not skip then (INACTIVECODE(LexCont.EndLine(LexerEndlineContinuation.Skip(args.ifdefStack,n,m)))) else endline (LexerEndlineContinuation.Skip(args.ifdefStack,n,m)) args skip lexbuf } | anywhite* "#endif" anywhite* ("//" [^'\n''\r']*)? { let lexed = lexeme lexbuf @@ -689,17 +689,17 @@ and ifdefSkip n m args skip = parse // If #endif is the first thing on the line then process it, otherwise ignore, because it is invalid (e.g. "(**) #endif ...") if (m.StartColumn <> 0) then - if not skip then (INACTIVECODE (LexCont.IfDefSkip(!args.ifdefStack,n,m))) else ifdefSkip n m args skip lexbuf + if not skip then (INACTIVECODE (LexCont.IfDefSkip(args.ifdefStack,n,m))) else ifdefSkip n m args skip lexbuf elif n = 0 then - match !(args.ifdefStack) with + match args.ifdefStack with | [] -> LEX_FAILURE (FSComp.SR.lexHashEndingNoMatchingIf()) | _ :: rest -> - args.ifdefStack := rest - if not skip then (HASH_ENDIF(m,lexed,LexCont.EndLine(LexerEndlineContinuation.Token(!args.ifdefStack)))) else endline (LexerEndlineContinuation.Token(!args.ifdefStack)) args skip lexbuf + args.ifdefStack <- rest + if not skip then (HASH_ENDIF(m,lexed,LexCont.EndLine(LexerEndlineContinuation.Token(args.ifdefStack)))) else endline (LexerEndlineContinuation.Token(args.ifdefStack)) args skip lexbuf else - let tok = INACTIVECODE(LexCont.EndLine(LexerEndlineContinuation.Skip(!args.ifdefStack,n-1,m))) + let tok = INACTIVECODE(LexCont.EndLine(LexerEndlineContinuation.Skip(args.ifdefStack,n-1,m))) let tok = shouldStartLine args lexbuf m (FSComp.SR.lexWrongNestedHashEndif()) tok - if not skip then tok else endline (LexerEndlineContinuation.Skip(!args.ifdefStack,(n-1),m)) args skip lexbuf } + if not skip then tok else endline (LexerEndlineContinuation.Skip(args.ifdefStack,(n-1),m)) args skip lexbuf } | newline { newline lexbuf; ifdefSkip n m args skip lexbuf } @@ -709,9 +709,9 @@ and ifdefSkip n m args skip = parse | surrogateChar surrogateChar | _ { // This tries to be nice and get tokens as 'words' because VS uses this when selecting stuff - if not skip then (INACTIVECODE (LexCont.IfDefSkip(!args.ifdefStack,n,m))) else ifdefSkip n m args skip lexbuf } + if not skip then (INACTIVECODE (LexCont.IfDefSkip(args.ifdefStack,n,m))) else ifdefSkip n m args skip lexbuf } | eof - { EOF (LexCont.IfDefSkip(!args.ifdefStack,n,m)) } + { EOF 
(LexCont.IfDefSkip(args.ifdefStack,n,m)) } // Called after lexing #if IDENT/#else/#endif - this checks whether there is nothing except end of line // or end of file and then calls the lexing function specified by 'cont' - either token or ifdefSkip @@ -729,40 +729,40 @@ and endline cont args skip = parse } | [^'\r' '\n']+ | _ - { let tok = fail args lexbuf (FSComp.SR.pplexExpectedSingleLineComment()) (WHITESPACE (LexCont.Token !args.ifdefStack)) + { let tok = fail args lexbuf (FSComp.SR.pplexExpectedSingleLineComment()) (WHITESPACE (LexCont.Token args.ifdefStack)) if not skip then tok else token args skip lexbuf } and string sargs skip = parse | '\\' newline anywhite* { let (_buf,_fin,m,args) = sargs newline lexbuf - if not skip then (STRING_TEXT (LexCont.String(!args.ifdefStack,m))) else string sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.String(args.ifdefStack,m))) else string sargs skip lexbuf } | escape_char { let (buf,_fin,m,args) = sargs addByteChar buf (escape (lexeme lexbuf).[1]) - if not skip then (STRING_TEXT (LexCont.String(!args.ifdefStack,m))) else string sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.String(args.ifdefStack,m))) else string sargs skip lexbuf } | trigraph { let (buf,_fin,m,args) = sargs let s = lexeme lexbuf addByteChar buf (trigraph s.[1] s.[2] s.[3]) - if not skip then (STRING_TEXT (LexCont.String(!args.ifdefStack,m))) else string sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.String(args.ifdefStack,m))) else string sargs skip lexbuf } | hexGraphShort { let (buf,_fin,m,args) = sargs addUnicodeChar buf (int (hexGraphShort (lexemeTrimLeft lexbuf 2))) - if not skip then (STRING_TEXT (LexCont.String(!args.ifdefStack,m))) else string sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.String(args.ifdefStack,m))) else string sargs skip lexbuf } | unicodeGraphShort { let (buf,_fin,m,args) = sargs addUnicodeChar buf (int (unicodeGraphShort (lexemeTrimLeft lexbuf 2))) - if not skip then (STRING_TEXT (LexCont.String(!args.ifdefStack,m))) else string sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.String(args.ifdefStack,m))) else string sargs skip lexbuf } | unicodeGraphLong { let (buf,_fin,m,args) = sargs let hexChars = lexemeTrimLeft lexbuf 2 - let result () = if not skip then (STRING_TEXT (LexCont.String(!args.ifdefStack,m))) else string sargs skip lexbuf + let result () = if not skip then (STRING_TEXT (LexCont.String(args.ifdefStack,m))) else string sargs skip lexbuf match unicodeGraphLong hexChars with | Invalid -> fail args lexbuf (FSComp.SR.lexInvalidUnicodeLiteral hexChars) (result ()) @@ -788,38 +788,38 @@ and string sargs skip = parse { let (buf,_fin,m,args) = sargs newline lexbuf addUnicodeString buf (lexeme lexbuf) - if not skip then (STRING_TEXT (LexCont.String(!args.ifdefStack,m))) else string sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.String(args.ifdefStack,m))) else string sargs skip lexbuf } | ident { let (buf,_fin,m,args) = sargs addUnicodeString buf (lexeme lexbuf) - if not skip then (STRING_TEXT (LexCont.String(!args.ifdefStack,m))) else string sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.String(args.ifdefStack,m))) else string sargs skip lexbuf } | integer | xinteger { let (buf,_fin,m,args) = sargs addUnicodeString buf (lexeme lexbuf) - if not skip then (STRING_TEXT (LexCont.String(!args.ifdefStack,m))) else string sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.String(args.ifdefStack,m))) else string sargs skip lexbuf } | anywhite + { let 
(buf,_fin,m,args) = sargs addUnicodeString buf (lexeme lexbuf) - if not skip then (STRING_TEXT (LexCont.String(!args.ifdefStack,m))) else string sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.String(args.ifdefStack,m))) else string sargs skip lexbuf } | eof { let (_buf,_fin,m,args) = sargs - EOF (LexCont.String(!args.ifdefStack,m)) } + EOF (LexCont.String(args.ifdefStack,m)) } | surrogateChar surrogateChar // surrogate code points always come in pairs | _ { let (buf,_fin,m,args) = sargs addUnicodeString buf (lexeme lexbuf) - if not skip then (STRING_TEXT (LexCont.String(!args.ifdefStack,m))) else string sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.String(args.ifdefStack,m))) else string sargs skip lexbuf } and verbatimString sargs skip = parse | '"' '"' { let (buf,_fin,m,args) = sargs addByteChar buf '\"' - if not skip then (STRING_TEXT (LexCont.VerbatimString(!args.ifdefStack,m))) else verbatimString sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.VerbatimString(args.ifdefStack,m))) else verbatimString sargs skip lexbuf } | '"' { let (buf,fin,_m,_args) = sargs @@ -835,32 +835,32 @@ and verbatimString sargs skip = parse { let (buf,_fin,m,args) = sargs newline lexbuf addUnicodeString buf (lexeme lexbuf) - if not skip then (STRING_TEXT (LexCont.VerbatimString(!args.ifdefStack,m))) else verbatimString sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.VerbatimString(args.ifdefStack,m))) else verbatimString sargs skip lexbuf } | ident { let (buf,_fin,m,args) = sargs addUnicodeString buf (lexeme lexbuf) - if not skip then (STRING_TEXT (LexCont.VerbatimString(!args.ifdefStack,m))) else verbatimString sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.VerbatimString(args.ifdefStack,m))) else verbatimString sargs skip lexbuf } | integer | xinteger { let (buf,_fin,m,args) = sargs addUnicodeString buf (lexeme lexbuf) - if not skip then (STRING_TEXT (LexCont.VerbatimString(!args.ifdefStack,m))) else verbatimString sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.VerbatimString(args.ifdefStack,m))) else verbatimString sargs skip lexbuf } | anywhite + { let (buf,_fin,m,args) = sargs addUnicodeString buf (lexeme lexbuf) - if not skip then (STRING_TEXT (LexCont.VerbatimString(!args.ifdefStack,m))) else verbatimString sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.VerbatimString(args.ifdefStack,m))) else verbatimString sargs skip lexbuf } | eof { let (_buf,_fin,m,args) = sargs - EOF (LexCont.VerbatimString(!args.ifdefStack,m)) } + EOF (LexCont.VerbatimString(args.ifdefStack,m)) } | surrogateChar surrogateChar // surrogate code points always come in pairs | _ { let (buf,_fin,m,args) = sargs addUnicodeString buf (lexeme lexbuf) - if not skip then (STRING_TEXT (LexCont.VerbatimString(!args.ifdefStack,m))) else verbatimString sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.VerbatimString(args.ifdefStack,m))) else verbatimString sargs skip lexbuf } and tripleQuoteString sargs skip = parse | '"' '"' '"' @@ -872,33 +872,33 @@ and tripleQuoteString sargs skip = parse { let (buf,_fin,m,args) = sargs newline lexbuf addUnicodeString buf (lexeme lexbuf) - if not skip then (STRING_TEXT (LexCont.TripleQuoteString(!args.ifdefStack,m))) else tripleQuoteString sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.TripleQuoteString(args.ifdefStack,m))) else tripleQuoteString sargs skip lexbuf } // The rest is to break into pieces to allow double-click-on-word and other such things | ident { let (buf,_fin,m,args) = 
sargs addUnicodeString buf (lexeme lexbuf) - if not skip then (STRING_TEXT (LexCont.TripleQuoteString(!args.ifdefStack,m))) else tripleQuoteString sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.TripleQuoteString(args.ifdefStack,m))) else tripleQuoteString sargs skip lexbuf } | integer | xinteger { let (buf,_fin,m,args) = sargs addUnicodeString buf (lexeme lexbuf) - if not skip then (STRING_TEXT (LexCont.TripleQuoteString(!args.ifdefStack,m))) else tripleQuoteString sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.TripleQuoteString(args.ifdefStack,m))) else tripleQuoteString sargs skip lexbuf } | anywhite + { let (buf,_fin,m,args) = sargs addUnicodeString buf (lexeme lexbuf) - if not skip then (STRING_TEXT (LexCont.TripleQuoteString(!args.ifdefStack,m))) else tripleQuoteString sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.TripleQuoteString(args.ifdefStack,m))) else tripleQuoteString sargs skip lexbuf } | eof { let (_buf,_fin,m,args) = sargs - EOF (LexCont.TripleQuoteString(!args.ifdefStack,m)) } + EOF (LexCont.TripleQuoteString(args.ifdefStack,m)) } | surrogateChar surrogateChar // surrogate code points always come in pairs | _ { let (buf,_fin,m,args) = sargs addUnicodeString buf (lexeme lexbuf) - if not skip then (STRING_TEXT (LexCont.TripleQuoteString(!args.ifdefStack,m))) else tripleQuoteString sargs skip lexbuf } + if not skip then (STRING_TEXT (LexCont.TripleQuoteString(args.ifdefStack,m))) else tripleQuoteString sargs skip lexbuf } // Parsing single-line comment - we need to split it into words for Visual Studio IDE and singleLineComment cargs skip = parse @@ -907,78 +907,78 @@ and singleLineComment cargs skip = parse trySaveXmlDoc lexbuf buff newline lexbuf // Saves the documentation (if we're collecting any) into a buffer-local variable. 
- if not skip then (LINE_COMMENT (LexCont.Token !args.ifdefStack)) else token args skip lexbuf } + if not skip then (LINE_COMMENT (LexCont.Token args.ifdefStack)) else token args skip lexbuf } | eof { let _, _n,_m,args = cargs // NOTE: it is legal to end a file with this comment, so we'll return EOF as a token - EOF (LexCont.Token !args.ifdefStack) } + EOF (LexCont.Token args.ifdefStack) } | [^ ' ' '\n' '\r' ]+ | anywhite+ { let buff,n,m,args = cargs // Append the current token to the XML documentation if we're collecting it tryAppendXmlDoc buff (lexeme lexbuf) - if not skip then (LINE_COMMENT (LexCont.SingleLineComment(!args.ifdefStack,n,m))) else singleLineComment (buff,n,m,args) skip lexbuf } + if not skip then (LINE_COMMENT (LexCont.SingleLineComment(args.ifdefStack,n,m))) else singleLineComment (buff,n,m,args) skip lexbuf } | surrogateChar surrogateChar | _ { let _, _n,_m,args = cargs - if not skip then (LINE_COMMENT (LexCont.Token !args.ifdefStack)) else token args skip lexbuf } + if not skip then (LINE_COMMENT (LexCont.Token args.ifdefStack)) else token args skip lexbuf } and comment cargs skip = parse | char { let n,m,args = cargs - if not skip then (COMMENT (LexCont.Comment(!args.ifdefStack,n,m))) else comment (n,m,args) skip lexbuf } + if not skip then (COMMENT (LexCont.Comment(args.ifdefStack,n,m))) else comment (n,m,args) skip lexbuf } | '"' { let n,m,args = cargs - if not skip then (COMMENT (LexCont.StringInComment(!args.ifdefStack,n,m))) else stringInComment n m args skip lexbuf } + if not skip then (COMMENT (LexCont.StringInComment(args.ifdefStack,n,m))) else stringInComment n m args skip lexbuf } | '"' '"' '"' { let n,m,args = cargs - if not skip then (COMMENT (LexCont.TripleQuoteStringInComment(!args.ifdefStack,n,m))) else tripleQuoteStringInComment n m args skip lexbuf } + if not skip then (COMMENT (LexCont.TripleQuoteStringInComment(args.ifdefStack,n,m))) else tripleQuoteStringInComment n m args skip lexbuf } | '@' '"' { let n,m,args = cargs - if not skip then (COMMENT (LexCont.VerbatimStringInComment(!args.ifdefStack,n,m))) else verbatimStringInComment n m args skip lexbuf } + if not skip then (COMMENT (LexCont.VerbatimStringInComment(args.ifdefStack,n,m))) else verbatimStringInComment n m args skip lexbuf } | "(*)" { let n,m,args = cargs - if not skip then (COMMENT (LexCont.Comment(!args.ifdefStack,n,m))) else comment cargs skip lexbuf } + if not skip then (COMMENT (LexCont.Comment(args.ifdefStack,n,m))) else comment cargs skip lexbuf } | '(' '*' { let n,m,args = cargs - if not skip then (COMMENT (LexCont.Comment(!args.ifdefStack,n+1,m))) else comment (n+1,m,args) skip lexbuf } + if not skip then (COMMENT (LexCont.Comment(args.ifdefStack,n+1,m))) else comment (n+1,m,args) skip lexbuf } | newline { let n,m,args = cargs newline lexbuf - if not skip then (COMMENT (LexCont.Comment(!args.ifdefStack,n,m))) else comment cargs skip lexbuf } + if not skip then (COMMENT (LexCont.Comment(args.ifdefStack,n,m))) else comment cargs skip lexbuf } | "*)" { let n,m,args = cargs - if n > 1 then if not skip then (COMMENT (LexCont.Comment(!args.ifdefStack,n-1,m))) else comment (n-1,m,args) skip lexbuf - else if not skip then (COMMENT (LexCont.Token !args.ifdefStack)) else token args skip lexbuf } + if n > 1 then if not skip then (COMMENT (LexCont.Comment(args.ifdefStack,n-1,m))) else comment (n-1,m,args) skip lexbuf + else if not skip then (COMMENT (LexCont.Token args.ifdefStack)) else token args skip lexbuf } | anywhite+ | [^ '\'' '(' '*' '\n' '\r' '"' ')' '@' ' ' '\t' ]+ { let n,m,args 
= cargs - if not skip then (COMMENT (LexCont.Comment(!args.ifdefStack,n,m))) else comment cargs skip lexbuf } + if not skip then (COMMENT (LexCont.Comment(args.ifdefStack,n,m))) else comment cargs skip lexbuf } | eof { let n,m,args = cargs - EOF (LexCont.Comment(!args.ifdefStack,n,m)) } + EOF (LexCont.Comment(args.ifdefStack,n,m)) } | surrogateChar surrogateChar | _ { let n,m,args = cargs - if not skip then (COMMENT (LexCont.Comment(!args.ifdefStack,n,m))) else comment (n,m,args) skip lexbuf } + if not skip then (COMMENT (LexCont.Comment(args.ifdefStack,n,m))) else comment (n,m,args) skip lexbuf } and stringInComment n m args skip = parse // Follow string lexing, skipping tokens until it finishes | '\\' newline anywhite* { newline lexbuf - if not skip then (COMMENT (LexCont.StringInComment(!args.ifdefStack,n,m))) else stringInComment n m args skip lexbuf } + if not skip then (COMMENT (LexCont.StringInComment(args.ifdefStack,n,m))) else stringInComment n m args skip lexbuf } | escape_char | trigraph @@ -989,69 +989,69 @@ and stringInComment n m args skip = parse | integer | xinteger | anywhite + - { if not skip then (COMMENT (LexCont.StringInComment(!args.ifdefStack,n,m))) else stringInComment n m args skip lexbuf } + { if not skip then (COMMENT (LexCont.StringInComment(args.ifdefStack,n,m))) else stringInComment n m args skip lexbuf } | '"' - { if not skip then (COMMENT (LexCont.Comment(!args.ifdefStack,n,m))) else comment (n,m,args) skip lexbuf } + { if not skip then (COMMENT (LexCont.Comment(args.ifdefStack,n,m))) else comment (n,m,args) skip lexbuf } | newline { newline lexbuf - if not skip then (COMMENT (LexCont.StringInComment(!args.ifdefStack,n,m))) else stringInComment n m args skip lexbuf } + if not skip then (COMMENT (LexCont.StringInComment(args.ifdefStack,n,m))) else stringInComment n m args skip lexbuf } | eof - { EOF (LexCont.StringInComment(!args.ifdefStack,n,m)) } + { EOF (LexCont.StringInComment(args.ifdefStack,n,m)) } | surrogateChar surrogateChar | _ - { if not skip then (COMMENT (LexCont.StringInComment(!args.ifdefStack,n,m))) else stringInComment n m args skip lexbuf } + { if not skip then (COMMENT (LexCont.StringInComment(args.ifdefStack,n,m))) else stringInComment n m args skip lexbuf } and verbatimStringInComment n m args skip = parse // Follow verbatimString lexing, in short, skip double-quotes and other chars until we hit a single quote | '"' '"' - { if not skip then (COMMENT (LexCont.VerbatimStringInComment(!args.ifdefStack,n,m))) else verbatimStringInComment n m args skip lexbuf } + { if not skip then (COMMENT (LexCont.VerbatimStringInComment(args.ifdefStack,n,m))) else verbatimStringInComment n m args skip lexbuf } | '"' - { if not skip then (COMMENT (LexCont.Comment(!args.ifdefStack,n,m))) else comment (n,m,args) skip lexbuf } + { if not skip then (COMMENT (LexCont.Comment(args.ifdefStack,n,m))) else comment (n,m,args) skip lexbuf } | ident | integer | xinteger | anywhite + - { if not skip then (COMMENT (LexCont.VerbatimStringInComment(!args.ifdefStack,n,m))) else verbatimStringInComment n m args skip lexbuf } + { if not skip then (COMMENT (LexCont.VerbatimStringInComment(args.ifdefStack,n,m))) else verbatimStringInComment n m args skip lexbuf } | newline { newline lexbuf - if not skip then (COMMENT (LexCont.VerbatimStringInComment(!args.ifdefStack,n,m))) else verbatimStringInComment n m args skip lexbuf } + if not skip then (COMMENT (LexCont.VerbatimStringInComment(args.ifdefStack,n,m))) else verbatimStringInComment n m args skip lexbuf } | eof - { EOF 
(LexCont.VerbatimStringInComment(!args.ifdefStack,n,m)) } + { EOF (LexCont.VerbatimStringInComment(args.ifdefStack,n,m)) } | surrogateChar surrogateChar | _ - { if not skip then (COMMENT (LexCont.VerbatimStringInComment(!args.ifdefStack,n,m))) else verbatimStringInComment n m args skip lexbuf } + { if not skip then (COMMENT (LexCont.VerbatimStringInComment(args.ifdefStack,n,m))) else verbatimStringInComment n m args skip lexbuf } and tripleQuoteStringInComment n m args skip = parse // Follow tripleQuoteString lexing | '"' '"' '"' - { if not skip then (COMMENT (LexCont.Comment(!args.ifdefStack,n,m))) else comment (n,m,args) skip lexbuf } + { if not skip then (COMMENT (LexCont.Comment(args.ifdefStack,n,m))) else comment (n,m,args) skip lexbuf } | ident | integer | xinteger | anywhite + - { if not skip then (COMMENT (LexCont.TripleQuoteStringInComment(!args.ifdefStack,n,m))) else tripleQuoteStringInComment n m args skip lexbuf } + { if not skip then (COMMENT (LexCont.TripleQuoteStringInComment(args.ifdefStack,n,m))) else tripleQuoteStringInComment n m args skip lexbuf } | newline { newline lexbuf - if not skip then (COMMENT (LexCont.TripleQuoteStringInComment(!args.ifdefStack,n,m))) else tripleQuoteStringInComment n m args skip lexbuf } + if not skip then (COMMENT (LexCont.TripleQuoteStringInComment(args.ifdefStack,n,m))) else tripleQuoteStringInComment n m args skip lexbuf } | eof - { EOF (LexCont.TripleQuoteStringInComment(!args.ifdefStack,n,m)) } + { EOF (LexCont.TripleQuoteStringInComment(args.ifdefStack,n,m)) } | surrogateChar surrogateChar | _ - { if not skip then (COMMENT (LexCont.TripleQuoteStringInComment(!args.ifdefStack,n,m))) else tripleQuoteStringInComment n m args skip lexbuf } + { if not skip then (COMMENT (LexCont.TripleQuoteStringInComment(args.ifdefStack,n,m))) else tripleQuoteStringInComment n m args skip lexbuf } and mlOnly m args skip = parse @@ -1059,17 +1059,17 @@ and mlOnly m args skip = parse { let buf = ByteBuffer.Create 100 let m2 = lexbuf.LexemeRange let _ = string (buf,defaultStringFinisher,m2,args) skip lexbuf - if not skip then (COMMENT (LexCont.MLOnly(!args.ifdefStack,m))) else mlOnly m args skip lexbuf } + if not skip then (COMMENT (LexCont.MLOnly(args.ifdefStack,m))) else mlOnly m args skip lexbuf } | newline - { newline lexbuf; if not skip then (COMMENT (LexCont.MLOnly(!args.ifdefStack,m))) else mlOnly m args skip lexbuf } + { newline lexbuf; if not skip then (COMMENT (LexCont.MLOnly(args.ifdefStack,m))) else mlOnly m args skip lexbuf } | "(*ENDIF-CAML*)" - { if not skip then (COMMENT (LexCont.Token !args.ifdefStack)) else token args skip lexbuf } + { if not skip then (COMMENT (LexCont.Token args.ifdefStack)) else token args skip lexbuf } | "(*ENDIF-OCAML*)" - { if not skip then (COMMENT (LexCont.Token !args.ifdefStack)) else token args skip lexbuf } + { if not skip then (COMMENT (LexCont.Token args.ifdefStack)) else token args skip lexbuf } | [^ '(' '"' '\n' '\r' ]+ - { if not skip then (COMMENT (LexCont.MLOnly(!args.ifdefStack,m))) else mlOnly m args skip lexbuf } + { if not skip then (COMMENT (LexCont.MLOnly(args.ifdefStack,m))) else mlOnly m args skip lexbuf } | eof - { EOF (LexCont.MLOnly(!args.ifdefStack,m)) } + { EOF (LexCont.MLOnly(args.ifdefStack,m)) } | surrogateChar surrogateChar | _ - { if not skip then (COMMENT (LexCont.MLOnly(!args.ifdefStack,m))) else mlOnly m args skip lexbuf } + { if not skip then (COMMENT (LexCont.MLOnly(args.ifdefStack,m))) else mlOnly m args skip lexbuf } diff --git a/src/fsharp/lexhelp.fs b/src/fsharp/lexhelp.fs index 
b25627e9a45..6201ec8a41e 100644 --- a/src/fsharp/lexhelp.fs +++ b/src/fsharp/lexhelp.fs @@ -49,7 +49,7 @@ type LexResourceManager() = /// Lexer parameters type lexargs = { defines: string list - ifdefStack: LexerIfdefStack + mutable ifdefStack: LexerIfdefStack resourceManager: LexResourceManager lightSyntaxStatus : LightSyntaxStatus errorLogger: ErrorLogger diff --git a/src/fsharp/lexhelp.fsi b/src/fsharp/lexhelp.fsi index f75ecd392d9..bcc1bc3dc8a 100644 --- a/src/fsharp/lexhelp.fsi +++ b/src/fsharp/lexhelp.fsi @@ -29,7 +29,7 @@ type LexResourceManager = type lexargs = { defines: string list - ifdefStack: LexerIfdefStack + mutable ifdefStack: LexerIfdefStack resourceManager: LexResourceManager lightSyntaxStatus : LightSyntaxStatus errorLogger: ErrorLogger diff --git a/src/fsharp/lib.fs b/src/fsharp/lib.fs index fba1c975e8d..b9653f35c7d 100755 --- a/src/fsharp/lib.fs +++ b/src/fsharp/lib.fs @@ -13,8 +13,8 @@ open FSharp.Compiler.AbstractIL.Internal.Library /// is this the developer-debug build? let debug = false let verbose = false -let progress = ref false -let tracking = ref false // intended to be a general hook to control diagnostic output when tracking down bugs +let mutable progress = false +let mutable tracking = false // intended to be a general hook to control diagnostic output when tracking down bugs let condition s = try (System.Environment.GetEnvironmentVariable(s) <> null) with _ -> false @@ -387,8 +387,19 @@ let inline cached cache resF = | _ -> cache.cacheVal +let inline cacheOptByref (cache: byref<'T option>) f = + match cache with + | Some v -> v + | None -> + let res = f() + cache <- Some res + res + +// REVIEW: this is only used because we want to mutate a record field, +// and because you cannot take a byref<_> of such a thing directly, +// we cannot use 'cacheOptByref'. If that is changed, this can be removed. let inline cacheOptRef cache f = - match !cache with + match !cache with | Some v -> v | None -> let res = f() diff --git a/src/fsharp/service/FSharpCheckerResults.fs b/src/fsharp/service/FSharpCheckerResults.fs index cd9b187d3ad..76585ba9d58 100644 --- a/src/fsharp/service/FSharpCheckerResults.fs +++ b/src/fsharp/service/FSharpCheckerResults.fs @@ -193,7 +193,7 @@ type internal TypeCheckInfo // We guarantee to only refine to a more nested environment. 
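Editor's note: the lexhelp.fs/lexhelp.fsi hunks just above make `ifdefStack` a plain list held in a mutable record field, which is why the lexer hunks replace `!args.ifdefStack` / `args.ifdefStack := ...` with direct reads and `<-` assignments. A minimal sketch of that push/pop pattern follows; the types are deliberately simplified stand-ins (the real stack entries carry a `range`, not an `int`, and the real `lexargs` record has many more fields).

    type IfdefEntry = IfDefIf | IfDefElse

    // Simplified stand-in for the real lexargs record.
    type Args = { mutable ifdefStack: (IfdefEntry * int) list }

    // "#if": push a frame, as in `args.ifdefStack <- (IfDefIf, m) :: args.ifdefStack`.
    let pushIf (args: Args) m =
        args.ifdefStack <- (IfDefIf, m) :: args.ifdefStack

    // "#endif": pop a frame, as in `args.ifdefStack <- rest`.
    let popEndif (args: Args) =
        match args.ifdefStack with
        | [] -> failwith "no matching #if"
        | _ :: rest -> args.ifdefStack <- rest

    let demo () =
        let args = { ifdefStack = [] }
        pushIf args 1
        popEndif args
        args.ifdefStack   // back to []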
It may not be strictly // the right environment, but will always be at least as rich - let bestAlmostIncludedSoFar = ref None + let mutable bestAlmostIncludedSoFar = None sResolutions.CapturedEnvs |> ResizeArray.iter (fun (possm,env,ad) -> // take only ranges that strictly do not include cursorPos (all ranges that touch cursorPos were processed during 'Strict Inclusion' part) @@ -204,15 +204,15 @@ type internal TypeCheckInfo | None -> true if contained then - match !bestAlmostIncludedSoFar with + match bestAlmostIncludedSoFar with | Some (rightm:range,_,_) -> if posGt possm.End rightm.End || (posEq possm.End rightm.End && posGt possm.Start rightm.Start) then - bestAlmostIncludedSoFar := Some (possm,env,ad) - | _ -> bestAlmostIncludedSoFar := Some (possm,env,ad)) + bestAlmostIncludedSoFar <- Some (possm,env,ad) + | _ -> bestAlmostIncludedSoFar <- Some (possm,env,ad)) let resEnv = - match !bestAlmostIncludedSoFar, mostDeeplyNestedEnclosingScope with + match bestAlmostIncludedSoFar, mostDeeplyNestedEnclosingScope with | Some (_,env,ad), None -> env, ad | Some (_,almostIncludedEnv,ad), Some (_,mostDeeplyNestedEnv,_) when almostIncludedEnv.eFieldLabels.Count >= mostDeeplyNestedEnv.eFieldLabels.Count -> @@ -1525,7 +1525,7 @@ module internal ParseAndCheckFile = // When analyzing files using ParseOneFile, i.e. for the use of editing clients, we do not apply line directives. // TODO(pathmap): expose PathMap on the service API, and thread it through here - let lexargs = mkLexargs(fileName, defines, lightSyntaxStatus, lexResourceManager, ref [], errHandler.ErrorLogger, PathMap.empty) + let lexargs = mkLexargs(fileName, defines, lightSyntaxStatus, lexResourceManager, [], errHandler.ErrorLogger, PathMap.empty) let lexargs = { lexargs with applyLineDirectives = false } let tokenizer = LexFilter.LexFilter(lightSyntaxStatus, options.CompilingFsLib, Lexer.token lexargs true, lexbuf) diff --git a/src/fsharp/service/IncrementalBuild.fs b/src/fsharp/service/IncrementalBuild.fs index 365570b5d62..de773fd3aa3 100755 --- a/src/fsharp/service/IncrementalBuild.fs +++ b/src/fsharp/service/IncrementalBuild.fs @@ -125,10 +125,10 @@ module internal IncrementalBuild = | VectorBuildRule ve -> ve.Name // Ids of exprs - let nextid = ref 999 // Number ids starting with 1000 to discern them + let mutable nextid = 999 // Number ids starting with 1000 to discern them let NextId() = - nextid:=!nextid+1 - Id(!nextid) + nextid <- nextid + 1 + Id(nextid) type INode = abstract Name: string diff --git a/src/fsharp/service/ServiceAssemblyContent.fs b/src/fsharp/service/ServiceAssemblyContent.fs index a86b26db8a5..f3c27337ab2 100644 --- a/src/fsharp/service/ServiceAssemblyContent.fs +++ b/src/fsharp/service/ServiceAssemblyContent.fs @@ -860,8 +860,8 @@ module ParsedInput = // Based on an initial review, no diagnostics should be generated. However the code should be checked more closely. 
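Editor's note: the `nextid` hunk above is representative of most of this patch: a heap-allocated `ref` cell read with `!` and written with `:=` becomes a `let mutable` binding read directly and written with `<-`. A self-contained before/after sketch, using a stand-in `Id` type since the real one is defined in IncrementalBuild.fs:

    type Id = Id of int

    // Before: a ref cell.
    let nextidRef = ref 999
    let NextIdOld () =
        nextidRef := !nextidRef + 1
        Id (!nextidRef)

    // After: a module-level mutable binding (compiled as a static field).
    let mutable nextid = 999
    let NextId () =
        nextid <- nextid + 1
        Id nextid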
use _ignoreAllDiagnostics = new ErrorScope() - let result: (Scope * pos * (* finished *) bool) option ref = ref None - let ns: string[] option ref = ref None + let mutable result = None + let mutable ns = None let modules = ResizeArray() let inline longIdentToIdents ident = ident |> Seq.map (fun x -> string x) |> Seq.toArray @@ -873,17 +873,17 @@ module ParsedInput = let doRange kind (scope: LongIdent) line col = if line <= currentLine then - match !result, insertionPoint with + match result, insertionPoint with | None, _ -> - result := Some ({ Idents = longIdentToIdents scope; Kind = kind }, mkPos line col, false) + result <- Some ({ Idents = longIdentToIdents scope; Kind = kind }, mkPos line col, false) | Some (_, _, true), _ -> () | Some (oldScope, oldPos, false), OpenStatementInsertionPoint.TopLevel when kind <> OpenDeclaration -> - result := Some (oldScope, oldPos, true) + result <- Some (oldScope, oldPos, true) | Some (oldScope, oldPos, _), _ -> match kind, oldScope.Kind with | (Namespace | NestedModule | TopModule), OpenDeclaration | _ when oldPos.Line <= line -> - result := + result <- Some ({ Idents = match scope with | [] -> oldScope.Idents @@ -916,11 +916,11 @@ module ParsedInput = if range.EndLine >= currentLine then let isModule = kind.IsModule match isModule, parent, ident with - | false, _, _ -> ns := Some (longIdentToIdents ident) + | false, _, _ -> ns <- Some (longIdentToIdents ident) // top level module with "inlined" namespace like Ns1.Ns2.TopModule | true, [], _f :: _s :: _ -> let ident = longIdentToIdents ident - ns := Some (ident.[0..ident.Length - 2]) + ns <- Some (ident.[0..ident.Length - 2]) | _ -> () let fullIdent = parent @ ident @@ -958,9 +958,9 @@ module ParsedInput = | ParsedInput.ImplFile input -> walkImplFileInput input let res = - !result + result |> Option.map (fun (scope, pos, _) -> - let ns = !ns |> Option.map longIdentToIdents + let ns = ns |> Option.map longIdentToIdents scope, ns, mkPos (pos.Line + 1) pos.Column) let modules = diff --git a/src/fsharp/service/ServiceLexing.fs b/src/fsharp/service/ServiceLexing.fs index c37ea56d903..a984a5ce1e1 100644 --- a/src/fsharp/service/ServiceLexing.fs +++ b/src/fsharp/service/ServiceLexing.fs @@ -475,10 +475,10 @@ module internal LexerStateEncoding = let callLexCont lexcont args skip lexbuf = let argsWithIfDefs ifd = - if !args.ifdefStack = ifd then + if args.ifdefStack = ifd then args else - {args with ifdefStack = ref ifd} + {args with ifdefStack = ifd} match lexcont with | LexCont.EndLine cont -> Lexer.endline cont args skip lexbuf | LexCont.Token ifd -> Lexer.token (argsWithIfDefs ifd) skip lexbuf @@ -775,8 +775,8 @@ type FSharpSourceTokenizer(defineConstants: string list, filename: string option let lexResourceManager = new Lexhelp.LexResourceManager() - let lexArgsLightOn = mkLexargs(filename, defineConstants, LightSyntaxStatus(true, false), lexResourceManager, ref [], DiscardErrorsLogger, PathMap.empty) - let lexArgsLightOff = mkLexargs(filename, defineConstants, LightSyntaxStatus(false, false), lexResourceManager, ref [], DiscardErrorsLogger, PathMap.empty) + let lexArgsLightOn = mkLexargs(filename, defineConstants, LightSyntaxStatus(true, false), lexResourceManager, [], DiscardErrorsLogger, PathMap.empty) + let lexArgsLightOff = mkLexargs(filename, defineConstants, LightSyntaxStatus(false, false), lexResourceManager, [], DiscardErrorsLogger, PathMap.empty) member this.CreateLineTokenizer(lineText: string) = let lexbuf = UnicodeLexing.StringAsLexbuf(isFeatureSupported, lineText) diff --git 
a/src/fsharp/service/ServiceNavigation.fs b/src/fsharp/service/ServiceNavigation.fs index 3578543b65b..dbecc3b101b 100755 --- a/src/fsharp/service/ServiceNavigation.fs +++ b/src/fsharp/service/ServiceNavigation.fs @@ -97,15 +97,15 @@ module NavigationImpl = /// Get information for implementation file let getNavigationFromImplFile (modules: SynModuleOrNamespace list) = // Map for dealing with name conflicts - let nameMap = ref Map.empty + let mutable nameMap = Map.empty let addItemName name = - let count = defaultArg (!nameMap |> Map.tryFind name) 0 - nameMap := (Map.add name (count + 1) (!nameMap)) + let count = defaultArg (nameMap |> Map.tryFind name) 0 + nameMap <- (Map.add name (count + 1) (nameMap)) (count + 1) let uniqueName name idx = - let total = Map.find name (!nameMap) + let total = Map.find name nameMap sprintf "%s_%d_of_%d" name idx total // Create declaration (for the left dropdown) @@ -305,13 +305,13 @@ module NavigationImpl = /// Get information for signature file let getNavigationFromSigFile (modules: SynModuleOrNamespaceSig list) = // Map for dealing with name conflicts - let nameMap = ref Map.empty + let mutable nameMap = Map.empty let addItemName name = - let count = defaultArg (!nameMap |> Map.tryFind name) 0 - nameMap := (Map.add name (count + 1) (!nameMap)) + let count = defaultArg (nameMap |> Map.tryFind name) 0 + nameMap <- (Map.add name (count + 1) (nameMap)) (count + 1) let uniqueName name idx = - let total = Map.find name (!nameMap) + let total = Map.find name nameMap sprintf "%s_%d_of_%d" name idx total // Create declaration (for the left dropdown) diff --git a/src/fsharp/service/service.fs b/src/fsharp/service/service.fs index a98cb2966b9..fdaacbb3764 100644 --- a/src/fsharp/service/service.fs +++ b/src/fsharp/service/service.fs @@ -1013,7 +1013,7 @@ type FSharpChecker(legacyReferenceResolver, CompileHelpers.setOutputStreams execute // References used to capture the results of compilation - let tcImportsRef = ref (None: TcImports option) + let tcImportsRef = ref None let assemblyBuilderRef = ref None let tcImportsCapture = Some (fun tcImports -> tcImportsRef := Some tcImports) diff --git a/src/fsharp/tast.fs b/src/fsharp/tast.fs index 98ededce05b..7dc9e0a11d4 100644 --- a/src/fsharp/tast.fs +++ b/src/fsharp/tast.fs @@ -1830,21 +1830,21 @@ and [] // We do not need to lock this mutable state this it is only ever accessed from the compiler thread. 
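Editor's note: the navigation code above de-duplicates display names by counting occurrences and then rendering "name_i_of_n". A hypothetical standalone wrapper for that scheme is sketched below; the real code keeps the map in a local inside getNavigationFromImplFile, whereas a small class with a mutable field is used here only to keep the sketch self-contained.

    type NameCounter() =
        let mutable nameMap : Map<string, int> = Map.empty

        /// Record one more item with this name and return its 1-based index.
        member _.AddItemName name =
            let count = defaultArg (nameMap |> Map.tryFind name) 0
            nameMap <- Map.add name (count + 1) nameMap
            count + 1

        /// After all items are recorded, render "name_idx_of_total".
        member _.UniqueName(name, idx) =
            let total = Map.find name nameMap
            sprintf "%s_%d_of_%d" name idx total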
let activePatternElemRefCache: NameMap option ref = ref None - let modulesByDemangledNameCache: NameMap option ref = ref None + let mutable modulesByDemangledNameCache: NameMap option = None - let exconsByDemangledNameCache: NameMap option ref = ref None + let mutable exconsByDemangledNameCache: NameMap option = None - let tyconsByDemangledNameAndArityCache: LayeredMap option ref = ref None + let mutable tyconsByDemangledNameAndArityCache: LayeredMap option = None - let tyconsByAccessNamesCache: LayeredMultiMap option ref = ref None + let mutable tyconsByAccessNamesCache: LayeredMultiMap option = None - let tyconsByMangledNameCache: NameMap option ref = ref None + let mutable tyconsByMangledNameCache: NameMap option = None - let allEntitiesByMangledNameCache: NameMap option ref = ref None + let mutable allEntitiesByMangledNameCache: NameMap option = None - let allValsAndMembersByPartialLinkageKeyCache: MultiMap option ref = ref None + let mutable allValsAndMembersByPartialLinkageKeyCache: MultiMap option = None - let allValsByLogicalNameCache: NameMap option ref = ref None + let mutable allValsByLogicalNameCache: NameMap option = None /// Namespace or module-compiled-as-type? member mtyp.ModuleOrNamespaceKind = kind @@ -1861,17 +1861,17 @@ and [] /// Mutation used during compilation of FSharp.Core.dll member mtyp.AddModuleOrNamespaceByMutation(modul: ModuleOrNamespace) = entities <- QueueList.appendOne entities modul - modulesByDemangledNameCache := None - allEntitiesByMangledNameCache := None + modulesByDemangledNameCache <- None + allEntitiesByMangledNameCache <- None #if !NO_EXTENSIONTYPING /// Mutation used in hosting scenarios to hold the hosted types in this module or namespace member mtyp.AddProvidedTypeEntity(entity: Entity) = entities <- QueueList.appendOne entities entity - tyconsByMangledNameCache := None - tyconsByDemangledNameAndArityCache := None - tyconsByAccessNamesCache := None - allEntitiesByMangledNameCache := None + tyconsByMangledNameCache <- None + tyconsByDemangledNameAndArityCache <- None + tyconsByAccessNamesCache <- None + allEntitiesByMangledNameCache <- None #endif /// Return a new module or namespace type with an entity added. @@ -1901,19 +1901,19 @@ and [] /// table is indexed by both name and generic arity. This means that for generic /// types "List`1", the entry (List, 1) will be present. member mtyp.TypesByDemangledNameAndArity m = - cacheOptRef tyconsByDemangledNameAndArityCache (fun () -> + cacheOptByref &tyconsByDemangledNameAndArityCache (fun () -> LayeredMap.Empty.AddAndMarkAsCollapsible( mtyp.TypeAndExceptionDefinitions |> List.map (fun (tc: Tycon) -> KeyTyconByDemangledNameAndArity tc.LogicalName (tc.Typars m) tc) |> List.toArray)) /// Get a table of types defined within this module, namespace or type. The /// table is indexed by both name and, for generic types, also by mangled name. member mtyp.TypesByAccessNames = - cacheOptRef tyconsByAccessNamesCache (fun () -> + cacheOptByref &tyconsByAccessNamesCache (fun () -> LayeredMultiMap.Empty.AddAndMarkAsCollapsible (mtyp.TypeAndExceptionDefinitions |> List.toArray |> Array.collect (fun (tc: Tycon) -> KeyTyconByAccessNames tc.LogicalName tc))) // REVIEW: we can remove this lookup and use AllEntitiesByMangledName instead? 
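Editor's note: the cache members below thread these mutable option fields by address into the new `cacheOptByref` helper introduced in lib.fs earlier in this diff. A minimal self-contained sketch of that compute-once pattern, with a hypothetical field name and payload:

    // Same shape as the helper added in lib.fs.
    let inline cacheOptByref (cache: byref<'T option>) f =
        match cache with
        | Some v -> v
        | None ->
            let res = f()
            cache <- Some res
            res

    type DemangledNames(raw: string list) =
        // Hypothetical cache field, mirroring e.g. modulesByDemangledNameCache.
        let mutable byNameCache : Map<string, string> option = None

        // Computed once, then served from the mutable field on later calls.
        member _.ByName =
            cacheOptByref &byNameCache (fun () ->
                raw |> List.map (fun s -> s.ToUpperInvariant(), s) |> Map.ofList)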
member mtyp.TypesByMangledName = let addTyconByMangledName (x: Tycon) tab = NameMap.add x.LogicalName x tab - cacheOptRef tyconsByMangledNameCache (fun () -> + cacheOptByref &tyconsByMangledNameCache (fun () -> List.foldBack addTyconByMangledName mtyp.TypeAndExceptionDefinitions Map.empty) /// Get a table of entities indexed by both logical and compiled names @@ -1925,7 +1925,7 @@ and [] if name1 = name2 then tab else NameMap.add name2 x tab - cacheOptRef allEntitiesByMangledNameCache (fun () -> + cacheOptByref &allEntitiesByMangledNameCache (fun () -> QueueList.foldBack addEntityByMangledName entities Map.empty) /// Get a table of entities indexed by both logical name @@ -1942,7 +1942,7 @@ and [] MultiMap.add key x tab else tab - cacheOptRef allValsAndMembersByPartialLinkageKeyCache (fun () -> + cacheOptByref &allValsAndMembersByPartialLinkageKeyCache (fun () -> QueueList.foldBack addValByMangledName vals MultiMap.empty) /// Try to find the member with the given linkage key in the given module. @@ -1963,7 +1963,7 @@ and [] NameMap.add x.LogicalName x tab else tab - cacheOptRef allValsByLogicalNameCache (fun () -> + cacheOptByref &allValsByLogicalNameCache (fun () -> QueueList.foldBack addValByName vals Map.empty) /// Compute a table of values and members indexed by logical name. @@ -1978,7 +1978,7 @@ and [] /// Get a table of F# exception definitions indexed by demangled name, so 'FailureException' is indexed by 'Failure' member mtyp.ExceptionDefinitionsByDemangledName = let add (tycon: Tycon) acc = NameMap.add tycon.LogicalName tycon acc - cacheOptRef exconsByDemangledNameCache (fun () -> + cacheOptByref &exconsByDemangledNameCache (fun () -> List.foldBack add mtyp.ExceptionDefinitions Map.empty) /// Get a table of nested module and namespace fragments indexed by demangled name (so 'ListModule' becomes 'List') @@ -1987,7 +1987,7 @@ and [] if entity.IsModuleOrNamespace then NameMap.add entity.DemangledModuleOrNamespaceName entity acc else acc - cacheOptRef modulesByDemangledNameCache (fun () -> + cacheOptByref &modulesByDemangledNameCache (fun () -> QueueList.foldBack add entities Map.empty) [] diff --git a/src/ilx/ilxsettings.fs b/src/ilx/ilxsettings.fs index 64ea572e02f..2355a6b1b6e 100644 --- a/src/ilx/ilxsettings.fs +++ b/src/ilx/ilxsettings.fs @@ -10,18 +10,18 @@ type IlxCallImplementation = | VirtEntriesVirtCode //++GLOBAL MUTABLE STATE (concurrency-safe because assigned only during F# library compilation) -let ilxCompilingFSharpCoreLib = ref false +let mutable ilxCompilingFSharpCoreLib = false //++GLOBAL MUTABLE STATE (concurrency-safe because assigned only during F# library compilation) -let ilxFsharpCoreLibAssemRef = ref (None : ILAssemblyRef option) +let mutable ilxFsharpCoreLibAssemRef = None : ILAssemblyRef option /// Scope references for FSharp.Core.dll let ilxFsharpCoreLibScopeRef () = - if !ilxCompilingFSharpCoreLib then + if ilxCompilingFSharpCoreLib then ILScopeRef.Local else let assemblyRef = - match !ilxFsharpCoreLibAssemRef with + match ilxFsharpCoreLibAssemRef with | Some o -> o | None -> // The exact public key token and version used here don't actually matter, or shouldn't. 
diff --git a/src/utils/TaggedCollections.fs b/src/utils/TaggedCollections.fs index 54f7e07fc5f..479b8def82b 100644 --- a/src/utils/TaggedCollections.fs +++ b/src/utils/TaggedCollections.fs @@ -478,15 +478,15 @@ namespace Internal.Utilities.Collections.Tagged not stack.IsEmpty let toSeq s = - let i = ref (SetIterator s) + let mutable i = SetIterator s { new IEnumerator<_> with - member __.Current = (!i).Current + member _.Current = i.Current interface System.Collections.IEnumerator with - member __.Current = box (!i).Current - member __.MoveNext() = (!i).MoveNext() - member __.Reset() = i := SetIterator s + member _.Current = box i.Current + member _.MoveNext() = i.MoveNext() + member _.Reset() = i <- SetIterator s interface System.IDisposable with - member __.Dispose() = () } + member _.Dispose() = () } //-------------------------------------------------------------------------- // Set comparison. This can be expensive. @@ -545,8 +545,8 @@ namespace Internal.Utilities.Collections.Tagged loop s [] let copyToArray s (arr: _[]) i = - let j = ref i - iter (fun x -> arr.[!j] <- x; j := !j + 1) s + let mutable j = i + iter (fun x -> arr.[j] <- x; j <- j + 1) s let toArray s = let n = (count s) @@ -1019,8 +1019,8 @@ namespace Internal.Utilities.Collections.Tagged mkFromEnumerator comparer empty ie let copyToArray s (arr: _[]) i = - let j = ref i - s |> iter (fun x y -> arr.[!j] <- KeyValuePair(x,y); j := !j + 1) + let mutable j = i + s |> iter (fun x y -> arr.[j] <- KeyValuePair(x,y); j <- j + 1) /// Imperative left-to-right iterators. @@ -1078,13 +1078,13 @@ namespace Internal.Utilities.Collections.Tagged not stack.IsEmpty let toSeq s = - let i = ref (MapIterator(s)) + let mutable i = MapIterator(s) { new IEnumerator<_> with - member self.Current = (!i).Current + member self.Current = i.Current interface System.Collections.IEnumerator with - member self.Current = box (!i).Current - member self.MoveNext() = (!i).MoveNext() - member self.Reset() = i := MapIterator(s) + member self.Current = box i.Current + member self.MoveNext() = i.MoveNext() + member self.Reset() = i <- MapIterator(s) interface System.IDisposable with member self.Dispose() = ()} diff --git a/src/utils/sformat.fs b/src/utils/sformat.fs index 21b84f5bef7..013e55626e5 100644 --- a/src/utils/sformat.fs +++ b/src/utils/sformat.fs @@ -822,9 +822,9 @@ namespace Microsoft.FSharp.Text.StructuredPrintfImpl let path = Dictionary(10,HashIdentity.Reference) // Roughly count the "nodes" printed, e.g. leaf items and inner nodes, but not every bracket and comma. 
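Editor's note: the TaggedCollections `toSeq` hunks below rebuild their `IEnumerator` wrappers around mutable state rather than a ref cell. A hypothetical, self-contained analogue over a plain list is sketched here, holding the cursor in mutable class fields:

    open System.Collections.Generic

    type ListEnumerator<'T>(items: 'T list) =
        let mutable remaining = items
        let mutable current = Unchecked.defaultof<'T>

        interface IEnumerator<'T> with
            member _.Current = current

        interface System.Collections.IEnumerator with
            member _.Current = box current
            member _.MoveNext() =
                match remaining with
                | x :: rest ->
                    current <- x
                    remaining <- rest
                    true
                | [] -> false
            member _.Reset() =
                remaining <- items
                current <- Unchecked.defaultof<'T>

        interface System.IDisposable with
            member _.Dispose() = ()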
- let size = ref opts.PrintSize - let exceededPrintSize() = !size<=0 - let countNodes n = if !size > 0 then size := !size - n else () // no need to keep decrementing (and avoid wrap around) + let mutable size = opts.PrintSize + let exceededPrintSize() = size<=0 + let countNodes n = if size > 0 then size <- size - n else () // no need to keep decrementing (and avoid wrap around) let stopShort _ = exceededPrintSize() // for unfoldL // Recursive descent diff --git a/tests/FSharp.Compiler.UnitTests/HashIfExpression.fs b/tests/FSharp.Compiler.UnitTests/HashIfExpression.fs index 21667c727d5..3f91e0f1591 100644 --- a/tests/FSharp.Compiler.UnitTests/HashIfExpression.fs +++ b/tests/FSharp.Compiler.UnitTests/HashIfExpression.fs @@ -59,12 +59,11 @@ type HashIfExpression() = member x.ErrorCount = errors.Count } - let stack : LexerIfdefStack = ref [] let lightSyntax = LightSyntaxStatus(true, false) let resourceManager = LexResourceManager () let defines = [] let startPos = Position.Empty - let args = mkLexargs ("dummy", defines, lightSyntax, resourceManager, stack, errorLogger, PathMap.empty) + let args = mkLexargs ("dummy", defines, lightSyntax, resourceManager, [], errorLogger, PathMap.empty) CompileThreadStatic.ErrorLogger <- errorLogger diff --git a/tests/fsharp/Compiler/CodeGen/EmittedIL/StaticLinkTests.fs b/tests/fsharp/Compiler/CodeGen/EmittedIL/StaticLinkTests.fs new file mode 100644 index 00000000000..8d47ba7dd7e --- /dev/null +++ b/tests/fsharp/Compiler/CodeGen/EmittedIL/StaticLinkTests.fs @@ -0,0 +1,227 @@ +// Copyright (c) Microsoft Corporation. All Rights Reserved. See License.txt in the project root for license information. + +namespace FSharp.Compiler.UnitTests.CodeGen.EmittedIL + +open System.IO +open System.Reflection +open FSharp.Compiler.UnitTests +open NUnit.Framework + +[] +module StaticLinkTests = + + [] + let ``Static link simple library``() = + let module1 = + let source = + """ +module Module1 + +type C() = class end + """ + Compilation.Create(source, Fsx, Library) + + let module2 = + let source = + """ +let y = Module1.C() +printfn "%A" y + """ + Compilation.Create(source, Fsx, Exe, cmplRefs=[CompilationReference.CreateFSharp(module1, staticLink=true)]) + + CompilerAssert.Execute(module2, + beforeExecute=(fun _ deps -> + deps + |> List.iter (fun dep -> try File.Delete dep with | _ -> ()))) + + [] + let ``Simple exe should fail to execute if dependency was not found and is not statically linked``() = + let module1 = + let source = + """ +module Module1 + +type C() = class end + """ + Compilation.Create(source, Fsx, Library) + + let module2 = + let source = + """ +let y = Module1.C() +printfn "%A" y + """ + Compilation.Create(source, Fsx, Exe, cmplRefs=[CompilationReference.CreateFSharp module1]) + + Assert.Throws(fun _ -> + CompilerAssert.Execute(module2, + beforeExecute=(fun _ deps -> + deps + |> List.iter (fun dep -> try File.Delete dep with | _ -> ())))) |> ignore + + [] + let ``Simple exe should execute if dependency was found and is not statically linked``() = + let module1 = + let source = + """ +module Module1 + +type C() = class end + """ + Compilation.Create(source, Fsx, Library) + + let module2 = + let source = + """ +let y = Module1.C() +printfn "%A" y + """ + Compilation.Create(source, Fsx, Exe, cmplRefs=[CompilationReference.CreateFSharp module1]) + + CompilerAssert.Execute module2 + + [] + let ``Static link quotes in multiple modules``() = + let module1 = + let source = + """ +module Module1 + +module Test = + let inline run() = + <@ fun (output:'T[]) (input:'T[]) 
(length:int) -> + let start = 0 + let mutable i = start + while i < length do + output.[i] <- input.[i] + i <- i + 1 @> + + let bar() = + sprintf "%A" (run()) + +type C() = + + [] + static member F x = (C(), System.DateTime.Now) + """ + Compilation.Create(source, Fsx, Library) + + let module2 = + let source = + """ + +let a = Module1.Test.bar() +let b = sprintf "%A" (Module1.Test.run()) + +let test1 = (a=b) +type D() = + + [] + static member F x = (Module1.C(), D(), System.DateTime.Now) + + +let z2 = Quotations.Expr.TryGetReflectedDefinition(typeof.GetMethod("F")) +let s2 = (sprintf "%2000A" z2) +let test2 = (s2 = "Some Lambda (x, NewTuple (NewObject (C), PropertyGet (None, Now, [])))") + +let z3 = Quotations.Expr.TryGetReflectedDefinition(typeof.GetMethod("F")) +let s3 = (sprintf "%2000A" z3) +let test3 = (s3 = "Some Lambda (x, NewTuple (NewObject (C), NewObject (D), PropertyGet (None, Now, [])))") + +#if EXTRAS +// Add some references to System.ValueTuple, and add a test case which statically links this DLL +let test4 = struct (3,4) +let test5 = struct (z2,z3) +#endif + +if not test1 then + stdout.WriteLine "*** test1 FAILED"; + eprintf "FAILED, in-module result %s is different from out-module call %s" a b + +if not test2 then + stdout.WriteLine "*** test2 FAILED"; + eprintf "FAILED, %s is different from expected" s2 +if not test3 then + stdout.WriteLine "*** test3 FAILED"; + eprintf "FAILED, %s is different from expected" s3 + + +if test1 && test2 && test3 then () +else failwith "Test Failed" + """ + Compilation.Create(source, Fsx, Exe, cmplRefs=[CompilationReference.CreateFSharp(module1, staticLink=true)]) + + CompilerAssert.Execute(module2, ignoreWarnings=true) + + [] + let ``Static link quotes in multiple modules - optimized``() = + let module1 = + let source = + """ +module Module1 + +module Test = + let inline run() = + <@ fun (output:'T[]) (input:'T[]) (length:int) -> + let start = 0 + let mutable i = start + while i < length do + output.[i] <- input.[i] + i <- i + 1 @> + + let bar() = + sprintf "%A" (run()) + +type C() = + + [] + static member F x = (C(), System.DateTime.Now) + """ + Compilation.Create(source, Fsx, Library, [|"--optimize+"|]) + + let module2 = + let source = + """ + +let a = Module1.Test.bar() +let b = sprintf "%A" (Module1.Test.run()) + +let test1 = (a=b) +type D() = + + [] + static member F x = (Module1.C(), D(), System.DateTime.Now) + + +let z2 = Quotations.Expr.TryGetReflectedDefinition(typeof.GetMethod("F")) +let s2 = (sprintf "%2000A" z2) +let test2 = (s2 = "Some Lambda (x, NewTuple (NewObject (C), PropertyGet (None, Now, [])))") + +let z3 = Quotations.Expr.TryGetReflectedDefinition(typeof.GetMethod("F")) +let s3 = (sprintf "%2000A" z3) +let test3 = (s3 = "Some Lambda (x, NewTuple (NewObject (C), NewObject (D), PropertyGet (None, Now, [])))") + +#if EXTRAS +// Add some references to System.ValueTuple, and add a test case which statically links this DLL +let test4 = struct (3,4) +let test5 = struct (z2,z3) +#endif + +if not test1 then + stdout.WriteLine "*** test1 FAILED"; + eprintf "FAILED, in-module result %s is different from out-module call %s" a b + +if not test2 then + stdout.WriteLine "*** test2 FAILED"; + eprintf "FAILED, %s is different from expected" s2 +if not test3 then + stdout.WriteLine "*** test3 FAILED"; + eprintf "FAILED, %s is different from expected" s3 + + +if test1 && test2 && test3 then () +else failwith "Test Failed" + """ + Compilation.Create(source, Fsx, Exe, [|"--optimize+"|], [CompilationReference.CreateFSharp(module1, 
staticLink=true)]) + + CompilerAssert.Execute(module2, ignoreWarnings=true) \ No newline at end of file diff --git a/tests/fsharp/Compiler/CompilerAssert.fs b/tests/fsharp/Compiler/CompilerAssert.fs index 1c753cd25e0..11e886a055d 100644 --- a/tests/fsharp/Compiler/CompilerAssert.fs +++ b/tests/fsharp/Compiler/CompilerAssert.fs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft Corporation. All Rights Reserved. See License.txt in the project root for license information. -namespace FSharp.Compiler.UnitTests +[] +module FSharp.Compiler.UnitTests.CompilerAssert open System open System.Diagnostics @@ -29,17 +30,54 @@ type ILVerifier (dllFilePath: string) = member this.VerifyILWithLineNumbers (qualifiedItemName: string, expectedIL: string) = ILChecker.checkILItemWithLineNumbers qualifiedItemName dllFilePath [ expectedIL ] -[] -module CompilerAssert = +type Worker () = + inherit MarshalByRefObject() - let checker = FSharpChecker.Create(suggestNamesForErrors=true) + member x.ExecuteTestCase assemblyPath (deps: string[]) = + AppDomain.CurrentDomain.add_AssemblyResolve(ResolveEventHandler(fun _ args -> + deps + |> Array.tryFind (fun (x: string) -> Path.GetFileNameWithoutExtension x = args.Name) + |> Option.bind (fun x -> if File.Exists x then Some x else None) + |> Option.map Assembly.LoadFile + |> Option.defaultValue null)) + let asm = Assembly.LoadFrom(assemblyPath) + let entryPoint = asm.EntryPoint + (entryPoint.Invoke(Unchecked.defaultof, [||])) |> ignore - let private config = TestFramework.initializeSuite () +type SourceKind = + | Fs + | Fsx + +type CompileOutput = + | Library + | Exe + +type CompilationReference = private CompilationReference of Compilation * staticLink: bool with + + static member CreateFSharp(cmpl: Compilation, ?staticLink) = + let staticLink = defaultArg staticLink false + CompilationReference(cmpl, staticLink) + +and Compilation = private Compilation of string * SourceKind * CompileOutput * options: string[] * CompilationReference list with + + static member Create(source, sourceKind, output, ?options, ?cmplRefs) = + let options = defaultArg options [||] + let cmplRefs = defaultArg cmplRefs [] + Compilation(source, sourceKind, output, options, cmplRefs) + +[] +type CompilerAssert private () = + + static let checker = FSharpChecker.Create(suggestNamesForErrors=true) + + static let config = TestFramework.initializeSuite () + + static let _ = config |> ignore // Do a one time dotnet sdk build to compute the proper set of reference assemblies to pass to the compiler #if !NETCOREAPP #else - let projectFile = """ + static let projectFile = """ @@ -56,13 +94,13 @@ module CompilerAssert = """ - let programFs = """ + static let programFs = """ open System [] let main argv = 0""" - let getNetCoreAppReferences = + static let getNetCoreAppReferences = let mutable output = "" let mutable errors = "" let mutable cleanUp = true @@ -107,37 +145,35 @@ let main argv = 0""" #endif #if FX_NO_APP_DOMAINS - let executeBuiltApp assembly = + static let executeBuiltApp assembly deps = let ctxt = AssemblyLoadContext("ContextName", true) try let asm = ctxt.LoadFromAssemblyPath(assembly) let entryPoint = asm.EntryPoint + ctxt.add_Resolving(fun ctxt name -> + deps + |> List.tryFind (fun (x: string) -> Path.GetFileNameWithoutExtension x = name.Name) + |> Option.map ctxt.LoadFromAssemblyPath + |> Option.defaultValue null) (entryPoint.Invoke(Unchecked.defaultof, [||])) |> ignore finally ctxt.Unload() #else - type Worker () = - inherit MarshalByRefObject() - - member __.ExecuteTestCase assemblyPath = - let asm 
= Assembly.LoadFrom(assemblyPath) - let entryPoint = asm.EntryPoint - (entryPoint.Invoke(Unchecked.defaultof, [||])) |> ignore - let pathToThisDll = Assembly.GetExecutingAssembly().CodeBase + static let pathToThisDll = Assembly.GetExecutingAssembly().CodeBase - let adSetup = + static let adSetup = let setup = new System.AppDomainSetup () setup.PrivateBinPath <- pathToThisDll setup - let executeBuiltApp assembly = + static let executeBuiltApp assembly deps = let ad = AppDomain.CreateDomain((Guid()).ToString(), null, adSetup) let worker = (ad.CreateInstanceFromAndUnwrap(pathToThisDll, typeof.FullName)) :?> Worker - worker.ExecuteTestCase assembly |>ignore + worker.ExecuteTestCase assembly (deps |> Array.ofList) |>ignore #endif - let private defaultProjectOptions = + static let defaultProjectOptions = { ProjectFileName = "Z:\\test.fsproj" ProjectId = None @@ -159,27 +195,146 @@ let main argv = 0""" Stamp = None } - let private gate = obj () - - let private compile isExe options source f = - lock gate <| fun () -> - let inputFilePath = Path.ChangeExtension(Path.GetTempFileName(), ".fs") - let outputFilePath = Path.ChangeExtension (Path.GetTempFileName(), if isExe then ".exe" else ".dll") - try - File.WriteAllText (inputFilePath, source) - let args = - options - |> Array.append defaultProjectOptions.OtherOptions - |> Array.append [| "fsc.exe"; inputFilePath; "-o:" + outputFilePath; (if isExe then "--target:exe" else "--target:library"); "--nowin32manifest" |] - let errors, _ = checker.Compile args |> Async.RunSynchronously - - f (errors, outputFilePath) + static let rawCompile inputFilePath outputFilePath isExe options source = + File.WriteAllText (inputFilePath, source) + let args = + options + |> Array.append defaultProjectOptions.OtherOptions + |> Array.append [| "fsc.exe"; inputFilePath; "-o:" + outputFilePath; (if isExe then "--target:exe" else "--target:library"); "--nowin32manifest" |] + let errors, _ = checker.Compile args |> Async.RunSynchronously - finally - try File.Delete inputFilePath with | _ -> () - try File.Delete outputFilePath with | _ -> () + errors, outputFilePath - let Pass (source: string) = + static let compileAux isExe options source f : unit = + let inputFilePath = Path.ChangeExtension(Path.GetTempFileName(), ".fs") + let outputFilePath = Path.ChangeExtension (Path.GetTempFileName(), if isExe then ".exe" else ".dll") + try + f (rawCompile inputFilePath outputFilePath isExe options source) + finally + try File.Delete inputFilePath with | _ -> () + try File.Delete outputFilePath with | _ -> () + + static let compileDisposable isScript isExe options source = + let ext = + if isScript then ".fsx" + else ".fs" + let inputFilePath = Path.ChangeExtension(Path.GetTempFileName(), ext) + let outputFilePath = Path.ChangeExtension (Path.GetTempFileName(), if isExe then ".exe" else ".dll") + let o = + { new IDisposable with + member _.Dispose() = + try File.Delete inputFilePath with | _ -> () + try File.Delete outputFilePath with | _ -> () } + try + o, rawCompile inputFilePath outputFilePath isExe options source + with + | _ -> + o.Dispose() + reraise() + + static let gate = obj () + + static let compile isExe options source f = + lock gate (fun _ -> compileAux isExe options source f) + + static let assertErrors ignoreWarnings (errors: FSharpErrorInfo[]) = + let errors = + if ignoreWarnings then + errors + |> Array.filter (fun error -> error.Severity <> FSharpErrorSeverity.Warning) + else + errors + if errors.Length > 0 then + Assert.Fail(sprintf "%A" errors) + + static let rec 
compileCompilationAux (disposals: ResizeArray) ignoreWarnings (cmpl: Compilation) : (FSharpErrorInfo[] * string) * string list = + let compilationRefs, deps = + match cmpl with + | Compilation(_, _, _, _, cmpls) -> + let compiledRefs = + cmpls + |> List.map (fun cmpl -> + match cmpl with + | CompilationReference (cmpl, staticLink) -> + compileCompilationAux disposals ignoreWarnings cmpl, staticLink) + + let compilationRefs = + compiledRefs + |> List.map (fun (((errors, outputFilePath), _), staticLink) -> + assertErrors ignoreWarnings errors + let rOption = "-r:" + outputFilePath + if staticLink then + [rOption;"--staticlink:" + Path.GetFileNameWithoutExtension outputFilePath] + else + [rOption]) + |> List.concat + |> Array.ofList + + let deps = + compiledRefs + |> List.map (fun ((_, deps), _) -> deps) + |> List.concat + |> List.distinct + + compilationRefs, deps + + let isScript = + match cmpl with + | Compilation(_, kind, _, _, _) -> + match kind with + | Fs -> false + | Fsx -> true + + let isExe = + match cmpl with + | Compilation(_, _, output, _, _) -> + match output with + | Library -> false + | Exe -> true + + let source = + match cmpl with + | Compilation(source, _, _, _, _) -> source + + let options = + match cmpl with + | Compilation(_, _, _, options, _) -> options + + let disposal, res = compileDisposable isScript isExe (Array.append options compilationRefs) source + disposals.Add disposal + + let deps2 = + compilationRefs + |> Array.filter (fun x -> not (x.Contains("--staticlink"))) + |> Array.map (fun x -> x.Replace("-r:", String.Empty)) + |> List.ofArray + + res, (deps @ deps2) + + static let rec compileCompilation ignoreWarnings (cmpl: Compilation) f = + let disposals = ResizeArray() + try + f (compileCompilationAux disposals ignoreWarnings cmpl) + finally + disposals + |> Seq.iter (fun x -> x.Dispose()) + + static member Compile(cmpl: Compilation, ?ignoreWarnings) = + let ignoreWarnings = defaultArg ignoreWarnings false + lock gate (fun () -> + compileCompilation ignoreWarnings cmpl (fun ((errors, _), _) -> + assertErrors ignoreWarnings errors)) + + static member Execute(cmpl: Compilation, ?ignoreWarnings, ?beforeExecute) = + let ignoreWarnings = defaultArg ignoreWarnings false + let beforeExecute = defaultArg beforeExecute (fun _ _ -> ()) + lock gate (fun () -> + compileCompilation ignoreWarnings cmpl (fun ((errors, outputFilePath), deps) -> + assertErrors ignoreWarnings errors + beforeExecute outputFilePath deps + executeBuiltApp outputFilePath deps)) + + static member Pass (source: string) = lock gate <| fun () -> let parseResults, fileAnswer = checker.ParseAndCheckFileInProject("test.fs", 0, SourceText.ofString source, defaultProjectOptions) |> Async.RunSynchronously @@ -191,7 +346,7 @@ let main argv = 0""" Assert.IsEmpty(typeCheckResults.Errors, sprintf "Type Check errors: %A" typeCheckResults.Errors) - let TypeCheckWithErrorsAndOptions options (source: string) expectedTypeErrors = + static member TypeCheckWithErrorsAndOptions options (source: string) expectedTypeErrors = lock gate <| fun () -> let parseResults, fileAnswer = checker.ParseAndCheckFileInProject( @@ -222,30 +377,30 @@ let main argv = 0""" Assert.AreEqual(expectedErrorMsg, info.Message, "expectedErrorMsg") ) - let TypeCheckWithErrors (source: string) expectedTypeErrors = - TypeCheckWithErrorsAndOptions [||] source expectedTypeErrors + static member TypeCheckWithErrors (source: string) expectedTypeErrors = + CompilerAssert.TypeCheckWithErrorsAndOptions [||] source expectedTypeErrors - let 
TypeCheckSingleErrorWithOptions options (source: string) (expectedServerity: FSharpErrorSeverity) (expectedErrorNumber: int) (expectedErrorRange: int * int * int * int) (expectedErrorMsg: string) = - TypeCheckWithErrorsAndOptions options source [| expectedServerity, expectedErrorNumber, expectedErrorRange, expectedErrorMsg |] + static member TypeCheckSingleErrorWithOptions options (source: string) (expectedServerity: FSharpErrorSeverity) (expectedErrorNumber: int) (expectedErrorRange: int * int * int * int) (expectedErrorMsg: string) = + CompilerAssert.TypeCheckWithErrorsAndOptions options source [| expectedServerity, expectedErrorNumber, expectedErrorRange, expectedErrorMsg |] - let TypeCheckSingleError (source: string) (expectedServerity: FSharpErrorSeverity) (expectedErrorNumber: int) (expectedErrorRange: int * int * int * int) (expectedErrorMsg: string) = - TypeCheckWithErrors source [| expectedServerity, expectedErrorNumber, expectedErrorRange, expectedErrorMsg |] + static member TypeCheckSingleError (source: string) (expectedServerity: FSharpErrorSeverity) (expectedErrorNumber: int) (expectedErrorRange: int * int * int * int) (expectedErrorMsg: string) = + CompilerAssert.TypeCheckWithErrors source [| expectedServerity, expectedErrorNumber, expectedErrorRange, expectedErrorMsg |] - let CompileExe (source: string) = + static member CompileExe (source: string) = compile true [||] source (fun (errors, _) -> if errors.Length > 0 then Assert.Fail (sprintf "Compile had warnings and/or errors: %A" errors)) - let CompileExeAndRun (source: string) = + static member CompileExeAndRun (source: string) = compile true [||] source (fun (errors, outputExe) -> if errors.Length > 0 then Assert.Fail (sprintf "Compile had warnings and/or errors: %A" errors) - executeBuiltApp outputExe + executeBuiltApp outputExe [] ) - let CompileLibraryAndVerifyILWithOptions options (source: string) (f: ILVerifier -> unit) = + static member CompileLibraryAndVerifyILWithOptions options (source: string) (f: ILVerifier -> unit) = compile false options source (fun (errors, outputFilePath) -> let errors = errors |> Array.filter (fun x -> x.Severity = FSharpErrorSeverity.Error) @@ -255,10 +410,10 @@ let main argv = 0""" f (ILVerifier outputFilePath) ) - let CompileLibraryAndVerifyIL (source: string) (f: ILVerifier -> unit) = - CompileLibraryAndVerifyILWithOptions [||] source f + static member CompileLibraryAndVerifyIL (source: string) (f: ILVerifier -> unit) = + CompilerAssert.CompileLibraryAndVerifyILWithOptions [||] source f - let RunScript (source: string) (expectedErrorMessages: string list) = + static member RunScript (source: string) (expectedErrorMessages: string list) = lock gate <| fun () -> // Intialize output and input streams use inStream = new StringReader("") @@ -294,7 +449,7 @@ let main argv = 0""" Assert.AreEqual(expectedErrorMessage, errorMessage) ) - let ParseWithErrors (source: string) expectedParseErrors = + static member ParseWithErrors (source: string) expectedParseErrors = let sourceFileName = "test.fs" let parsingOptions = { FSharpParsingOptions.Default with SourceFiles = [| sourceFileName |] } let parseResults = checker.ParseFile(sourceFileName, SourceText.ofString source, parsingOptions) |> Async.RunSynchronously diff --git a/tests/fsharp/FSharpSuite.Tests.fsproj b/tests/fsharp/FSharpSuite.Tests.fsproj index 563358e1a1f..baec3523e1d 100644 --- a/tests/fsharp/FSharpSuite.Tests.fsproj +++ b/tests/fsharp/FSharpSuite.Tests.fsproj @@ -34,6 +34,7 @@ + diff --git 
a/vsintegration/Vsix/RegisterFsharpPackage.pkgdef b/vsintegration/Vsix/RegisterFsharpPackage.pkgdef index c4a9de29e24..3c029f496cf 100644 --- a/vsintegration/Vsix/RegisterFsharpPackage.pkgdef +++ b/vsintegration/Vsix/RegisterFsharpPackage.pkgdef @@ -55,7 +55,7 @@ "1"="{92EF0900-2251-11D2-B72E-0000F87572EF}" [$RootKey$\Packages\{91a04a73-4f2c-4e7c-ad38-c1a68e7da05c}] -"ProductVersion"="10.4" +"ProductVersion"="{{FSProductVersion}}" "ProductName"="Visual F#" "CompanyName"="Microsoft Corp." diff --git a/vsintegration/Vsix/VisualFSharpFull/VisualFSharpFull.csproj b/vsintegration/Vsix/VisualFSharpFull/VisualFSharpFull.csproj index 68b8b5e1122..44554938774 100644 --- a/vsintegration/Vsix/VisualFSharpFull/VisualFSharpFull.csproj +++ b/vsintegration/Vsix/VisualFSharpFull/VisualFSharpFull.csproj @@ -14,16 +14,22 @@ Designer - - Always - true - RegisterFsharpPackage.pkgdef - + + + RegisterFsharpPackage.pkgdef + {{FSProductVersion}} + $(FSProductVersion) + {{FSLanguageVersion}} + $(FSLanguageVersion) + true + + PreserveNewest License.txt true + PreserveNewest FSharp.Data.TypeProviders.dll diff --git a/vsintegration/src/FSharp.Editor/LanguageService/Tokenizer.fs b/vsintegration/src/FSharp.Editor/LanguageService/Tokenizer.fs index e45e28a19d0..10dd261e8f5 100644 --- a/vsintegration/src/FSharp.Editor/LanguageService/Tokenizer.fs +++ b/vsintegration/src/FSharp.Editor/LanguageService/Tokenizer.fs @@ -458,7 +458,7 @@ module internal Tokenizer = let lineTokenizer = sourceTokenizer.CreateLineTokenizer(lineContents) let tokens = ResizeArray() let mutable tokenInfoOption = None - let previousLexState = ref lexState + let mutable previousLexState = lexState let processToken() = let classificationType = compilerTokenToRoslynToken(tokenInfoOption.Value.ColorClass) @@ -471,9 +471,9 @@ module internal Tokenizer = tokens.Add savedToken let scanAndColorNextToken() = - let info, nextLexState = lineTokenizer.ScanToken(!previousLexState) + let info, nextLexState = lineTokenizer.ScanToken(previousLexState) tokenInfoOption <- info - previousLexState := nextLexState + previousLexState <- nextLexState // Apply some hacks to clean up the token stream (we apply more later) match info with @@ -519,7 +519,7 @@ module internal Tokenizer = classifiedSpans.Add(new ClassifiedSpan(classificationType, textSpan)) startPosition <- endPosition - SourceLineData(textLine.Start, lexState, previousLexState.Value, lineContents.GetHashCode(), classifiedSpans.ToArray(), tokens.ToArray()) + SourceLineData(textLine.Start, lexState, previousLexState, lineContents.GetHashCode(), classifiedSpans.ToArray(), tokens.ToArray()) // We keep incremental data per-document. 
When text changes we correlate text line-by-line (by hash codes of lines) diff --git a/vsintegration/src/FSharp.ProjectSystem.FSharp/ProjectSystem.fsproj b/vsintegration/src/FSharp.ProjectSystem.FSharp/ProjectSystem.fsproj index 7779a1a3f0e..8408b7a2989 100644 --- a/vsintegration/src/FSharp.ProjectSystem.FSharp/ProjectSystem.fsproj +++ b/vsintegration/src/FSharp.ProjectSystem.FSharp/ProjectSystem.fsproj @@ -24,13 +24,22 @@ Menus.ctmenu Designer + + + true Microsoft.VisualStudio.FSharp.ProjectSystem.FSharpSR true VSPackage Designer + $(IntermediateOutputPath)resources\ + {{FSProductVersion}} + $(FSProductVersion) + {{FSLanguageVersion}} + $(FSLanguageVersion) + diff --git a/vsintegration/src/FSharp.ProjectSystem.FSharp/VSPackage.resx b/vsintegration/src/FSharp.ProjectSystem.FSharp/VSPackage.resx index 0e636602e52..f29e74b9ecb 100644 --- a/vsintegration/src/FSharp.ProjectSystem.FSharp/VSPackage.resx +++ b/vsintegration/src/FSharp.ProjectSystem.FSharp/VSPackage.resx @@ -464,10 +464,10 @@ Customizes the environment to maximize code editor screen space and improve the visibility of F# commands and tool windows. - Microsoft Visual F# Tools 10.4 for F# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} - Microsoft Visual F# Tools 10.4 for F# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} 1.0 @@ -476,7 +476,7 @@ Microsoft Visual F# Tools - Visual F# Tools 10.4 for F# 4.6 + Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} F# Interactive diff --git a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.cs.xlf b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.cs.xlf index e27841feae2..f047f0cf58d 100644 --- a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.cs.xlf +++ b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.cs.xlf @@ -433,13 +433,13 @@ - Microsoft Visual F# Tools 10.4 for F# 4.6 - Nástroje Microsoft Visual F# 10.4 pro F# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Nástroje Microsoft Visual F# {{FSProductVersion}} pro F# {{FSLanguageVersion}} - Microsoft Visual F# Tools 10.4 for F# 4.6 - Nástroje Microsoft Visual F# 10.4 pro F# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Nástroje Microsoft Visual F# {{FSProductVersion}} pro F# {{FSLanguageVersion}} @@ -453,8 +453,8 @@ - Visual F# Tools 10.4 for F# 4.6 - Nástroje Visual F# 10.4 pro F# 4.6 + Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Nástroje Visual F# {{FSProductVersion}} pro F# {{FSLanguageVersion}} diff --git a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.de.xlf b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.de.xlf index 57e31224de5..cd550b54944 100644 --- a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.de.xlf +++ b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.de.xlf @@ -433,13 +433,13 @@ - Microsoft Visual F# Tools 10.4 for F# 4.6 - Microsoft Visual F# Tools 10.4 für f# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Microsoft Visual F# Tools {{FSProductVersion}} für f# {{FSLanguageVersion}} - Microsoft Visual F# Tools 10.4 for F# 4.6 - Microsoft Visual F# Tools 10.4 für f# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Microsoft Visual F# Tools {{FSProductVersion}} für f# {{FSLanguageVersion}} @@ -453,8 +453,8 @@ - Visual F# Tools 10.4 for F# 4.6 - Visual F# Tools 10.4 für F# 4.6 + 
Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Visual F# Tools {{FSProductVersion}} für F# {{FSLanguageVersion}} diff --git a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.es.xlf b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.es.xlf index 2ff3a9663bf..2204dcdc7d1 100644 --- a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.es.xlf +++ b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.es.xlf @@ -433,13 +433,13 @@ - Microsoft Visual F# Tools 10.4 for F# 4.6 - Herramientas de Microsoft Visual F# 10.4 para F# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Herramientas de Microsoft Visual F# {{FSProductVersion}} para F# {{FSLanguageVersion}} - Microsoft Visual F# Tools 10.4 for F# 4.6 - Herramientas de Microsoft Visual F# 10.4 para F# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Herramientas de Microsoft Visual F# {{FSProductVersion}} para F# {{FSLanguageVersion}} @@ -453,8 +453,8 @@ - Visual F# Tools 10.4 for F# 4.6 - Visual F# Tools 10.4 para F# 4.6 + Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Visual F# Tools {{FSProductVersion}} para F# {{FSLanguageVersion}} diff --git a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.fr.xlf b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.fr.xlf index 0e5d733739b..a7bedb7cc55 100644 --- a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.fr.xlf +++ b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.fr.xlf @@ -433,13 +433,13 @@ - Microsoft Visual F# Tools 10.4 for F# 4.6 - Microsoft Visual F# Tools 10.4 pour F# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Microsoft Visual F# Tools {{FSProductVersion}} pour F# {{FSLanguageVersion}} - Microsoft Visual F# Tools 10.4 for F# 4.6 - Microsoft Visual F# Tools 10.4 pour F# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Microsoft Visual F# Tools {{FSProductVersion}} pour F# {{FSLanguageVersion}} @@ -453,8 +453,8 @@ - Visual F# Tools 10.4 for F# 4.6 - Visual F# Tools 10.4 pour F# 4.6 + Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Visual F# Tools {{FSProductVersion}} pour F# {{FSLanguageVersion}} diff --git a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.it.xlf b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.it.xlf index 6e93bc28a74..ba72bff02e0 100644 --- a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.it.xlf +++ b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.it.xlf @@ -433,13 +433,13 @@ - Microsoft Visual F# Tools 10.4 for F# 4.6 - Microsoft Visual F# Tools 10.4 per F# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Microsoft Visual F# Tools {{FSProductVersion}} per F# {{FSLanguageVersion}} - Microsoft Visual F# Tools 10.4 for F# 4.6 - Microsoft Visual F# Tools 10.4 per F# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Microsoft Visual F# Tools {{FSProductVersion}} per F# {{FSLanguageVersion}} @@ -453,8 +453,8 @@ - Visual F# Tools 10.4 for F# 4.6 - Visual F# Tools 10.4 per F# 4.6 + Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Visual F# Tools {{FSProductVersion}} per F# {{FSLanguageVersion}} diff --git a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.ja.xlf b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.ja.xlf index 
e0c79c4a2bf..710b54d5b70 100644 --- a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.ja.xlf +++ b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.ja.xlf @@ -433,13 +433,13 @@ - Microsoft Visual F# Tools 10.4 for F# 4.6 - F# 4.6 用 Microsoft Visual F# Tools 10.4 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + F# {{FSLanguageVersion}} 用 Microsoft Visual F# Tools {{FSProductVersion}} - Microsoft Visual F# Tools 10.4 for F# 4.6 - F# 4.6 用 Microsoft Visual F# Tools 10.4 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + F# {{FSLanguageVersion}} 用 Microsoft Visual F# Tools {{FSProductVersion}} @@ -453,8 +453,8 @@ - Visual F# Tools 10.4 for F# 4.6 - F# 4.6 用 Visual F# Tools 10.4 + Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + F# {{FSLanguageVersion}} 用 Visual F# Tools {{FSProductVersion}} diff --git a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.ko.xlf b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.ko.xlf index 4133f5ea293..588d5e014b9 100644 --- a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.ko.xlf +++ b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.ko.xlf @@ -433,13 +433,13 @@ - Microsoft Visual F# Tools 10.4 for F# 4.6 - F# 4.6용 Microsoft Visual F# Tools 10.4 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + F# {{FSLanguageVersion}}용 Microsoft Visual F# Tools {{FSProductVersion}} - Microsoft Visual F# Tools 10.4 for F# 4.6 - F# 4.6용 Microsoft Visual F# Tools 10.4 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + F# {{FSLanguageVersion}}용 Microsoft Visual F# Tools {{FSProductVersion}} @@ -453,8 +453,8 @@ - Visual F# Tools 10.4 for F# 4.6 - F# 4.6용 Visual F# Tools 10.4 + Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + F# {{FSLanguageVersion}}용 Visual F# Tools {{FSProductVersion}} diff --git a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.pl.xlf b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.pl.xlf index 5ae72d11dc8..08a4b45dfb4 100644 --- a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.pl.xlf +++ b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.pl.xlf @@ -433,13 +433,13 @@ - Microsoft Visual F# Tools 10.4 for F# 4.6 - Microsoft Visual F# Tools 10.4 dla języka F# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Microsoft Visual F# Tools {{FSProductVersion}} dla języka F# {{FSLanguageVersion}} - Microsoft Visual F# Tools 10.4 for F# 4.6 - Microsoft Visual F# Tools 10.4 dla języka F# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Microsoft Visual F# Tools {{FSProductVersion}} dla języka F# {{FSLanguageVersion}} @@ -453,8 +453,8 @@ - Visual F# Tools 10.4 for F# 4.6 - Visual F# Tools 10.4 dla języka F# 4.6 + Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Visual F# Tools {{FSProductVersion}} dla języka F# {{FSLanguageVersion}} diff --git a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.pt-BR.xlf b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.pt-BR.xlf index eb0221892ad..6ae72d28fde 100644 --- a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.pt-BR.xlf +++ b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.pt-BR.xlf @@ -433,13 +433,13 @@ - Microsoft Visual F# Tools 10.4 for F# 4.6 - Microsoft Visual F# Tools 10.4 para F# 4.6 + Microsoft Visual F# Tools 
{{FSProductVersion}} for F# {{FSLanguageVersion}} + Microsoft Visual F# Tools {{FSProductVersion}} para F# {{FSLanguageVersion}} - Microsoft Visual F# Tools 10.4 for F# 4.6 - Microsoft Visual F# Tools 10.4 para F# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Microsoft Visual F# Tools {{FSProductVersion}} para F# {{FSLanguageVersion}} @@ -453,8 +453,8 @@ - Visual F# Tools 10.4 for F# 4.6 - Visual F# Tools 10.4 para F# 4.6 + Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Visual F# Tools {{FSProductVersion}} para F# {{FSLanguageVersion}} diff --git a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.ru.xlf b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.ru.xlf index 897d2db04fc..418494b459f 100644 --- a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.ru.xlf +++ b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.ru.xlf @@ -433,13 +433,13 @@ - Microsoft Visual F# Tools 10.4 for F# 4.6 - Microsoft Visual F# Tools 10.4 для F# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Microsoft Visual F# Tools {{FSProductVersion}} для F# {{FSLanguageVersion}} - Microsoft Visual F# Tools 10.4 for F# 4.6 - Microsoft Visual F# Tools 10.4 для F# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Microsoft Visual F# Tools {{FSProductVersion}} для F# {{FSLanguageVersion}} @@ -453,8 +453,8 @@ - Visual F# Tools 10.4 for F# 4.6 - Visual F# Tools 10.4 для F# 4.6 + Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Visual F# Tools {{FSProductVersion}} для F# {{FSLanguageVersion}} diff --git a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.tr.xlf b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.tr.xlf index b435f62c817..53643a6ae2a 100644 --- a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.tr.xlf +++ b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.tr.xlf @@ -433,13 +433,13 @@ - Microsoft Visual F# Tools 10.4 for F# 4.6 - F# 4.6 için Microsoft Visual F# Tools 10.4 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + F# {{FSLanguageVersion}} için Microsoft Visual F# Tools {{FSProductVersion}} - Microsoft Visual F# Tools 10.4 for F# 4.6 - F# 4.6 için Microsoft Visual F# Tools 10.4 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + F# {{FSLanguageVersion}} için Microsoft Visual F# Tools {{FSProductVersion}} @@ -453,8 +453,8 @@ - Visual F# Tools 10.4 for F# 4.6 - F# 4.6 için Visual F# Araçları 10.4 + Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + F# {{FSLanguageVersion}} için Visual F# Araçları {{FSProductVersion}} diff --git a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.zh-Hans.xlf b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.zh-Hans.xlf index 24e4048ae2e..4e3108b778f 100644 --- a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.zh-Hans.xlf +++ b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.zh-Hans.xlf @@ -433,13 +433,13 @@ - Microsoft Visual F# Tools 10.4 for F# 4.6 - Microsoft Visual F# Tools 10.4 for F# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} - Microsoft Visual F# Tools 10.4 for F# 4.6 - Microsoft Visual F# Tools 10.4 for F# 4.6 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Microsoft Visual F# 
Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} @@ -453,8 +453,8 @@ - Visual F# Tools 10.4 for F# 4.6 - Visual F# Tools 10.4 for F# 4.6 + Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} diff --git a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.zh-Hant.xlf b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.zh-Hant.xlf index 326a36bb813..7eb95bad0aa 100644 --- a/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.zh-Hant.xlf +++ b/vsintegration/src/FSharp.ProjectSystem.FSharp/xlf/VSPackage.zh-Hant.xlf @@ -433,13 +433,13 @@ - Microsoft Visual F# Tools 10.4 for F# 4.6 - 適用於 F# 4.6 的 Microsoft Visual F# Tools 10.4 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + 適用於 F# {{FSLanguageVersion}} 的 Microsoft Visual F# Tools {{FSProductVersion}} - Microsoft Visual F# Tools 10.4 for F# 4.6 - 適用於 F# 4.6 的 Microsoft Visual F# Tools 10.4 + Microsoft Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + 適用於 F# {{FSLanguageVersion}} 的 Microsoft Visual F# Tools {{FSProductVersion}} @@ -453,8 +453,8 @@ - Visual F# Tools 10.4 for F# 4.6 - 適用於 F# 4.6 的 Visual F# Tools 10.4 + Visual F# Tools {{FSProductVersion}} for F# {{FSLanguageVersion}} + 適用於 F# {{FSLanguageVersion}} 的 Visual F# Tools {{FSProductVersion}} diff --git a/vsintegration/src/FSharp.VS.FSI/sessions.fs b/vsintegration/src/FSharp.VS.FSI/sessions.fs index 5e07e4672ea..b1b5a85616d 100644 --- a/vsintegration/src/FSharp.VS.FSI/sessions.fs +++ b/vsintegration/src/FSharp.VS.FSI/sessions.fs @@ -42,9 +42,9 @@ type internal EventWrapper() = /// Exceptions raised by f x are caught and reported in DEBUG mode. let timeoutApp descr timeoutMS (f : 'a -> 'b) (arg:'a) = use ev = new EventWrapper() - let r : 'b option ref = ref None + let mutable r = None System.Threading.ThreadPool.QueueUserWorkItem(fun _ -> - r := + r <- try f arg |> Some with @@ -63,7 +63,7 @@ let timeoutApp descr timeoutMS (f : 'a -> 'b) (arg:'a) = ev.Set() ) |> ignore ev.WaitOne(timeoutMS) |> ignore - !r + r module SessionsProperties = let mutable useAnyCpuVersion = true // 64-bit by default
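
Note on the new test harness API: the CompilerAssert.fs changes above replace the module-level helpers with a static CompilerAssert class plus the Compilation and CompilationReference types, so a test can describe a multi-assembly compilation (including static linking) declaratively. The following is a minimal usage sketch only; the source strings, names, and values are hypothetical, and it assumes the enclosing FSharp.Compiler.UnitTests namespace/module is opened in the test file.

// Hypothetical test exercising the API added in this diff:
// Compilation.Create, CompilationReference.CreateFSharp, CompilerAssert.Compile/Execute.
let library =
    Compilation.Create(
        """
module Library

let getValue () = 42
        """,
        Fs,        // SourceKind: compile as a .fs file
        Library)   // CompileOutput: produce a .dll

let program =
    Compilation.Create(
        """
module Program

[<EntryPoint>]
let main (_: string[]) =
    if Library.getValue () <> 42 then failwith "unexpected value"
    0
        """,
        Fs,
        Exe,       // CompileOutput: produce an .exe
        cmplRefs = [ CompilationReference.CreateFSharp(library, staticLink = true) ])

// Compile only, failing the test on any error (and on warnings unless ignored):
CompilerAssert.Compile(program)

// Compile, then run the executable with its referenced outputs resolvable as dependencies:
CompilerAssert.Execute(program, ignoreWarnings = true)

When a reference is created with staticLink = true, compileCompilationAux passes both -r:&lt;output path&gt; and --staticlink:&lt;assembly name&gt; to fsc, so the referenced compilation is merged into the referencing output rather than loaded at run time.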
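For the FX_NO_APP_DOMAINS path, executeBuiltApp in this diff loads the compiled test assembly into a collectible AssemblyLoadContext and resolves the other compilation outputs through a Resolving handler keyed by file name. Below is a self-contained sketch of that approach, assuming .NET Core; runCompiledExe and the entry-point argument handling are illustrative, not the exact harness code.

open System.IO
open System.Reflection
open System.Runtime.Loader

/// Run the entry point of a freshly compiled exe, resolving its sibling
/// compilation outputs (deps are full paths to the referenced .dll files).
let runCompiledExe (assemblyPath: string) (deps: string list) =
    let ctxt = AssemblyLoadContext("TestRun", isCollectible = true)
    try
        // Resolve a requested assembly by matching its simple name against the dependency paths.
        ctxt.add_Resolving(fun loadContext assemblyName ->
            deps
            |> List.tryFind (fun p -> Path.GetFileNameWithoutExtension p = assemblyName.Name)
            |> Option.map (fun p -> loadContext.LoadFromAssemblyPath p)
            |> Option.defaultValue null)
        let asm = ctxt.LoadFromAssemblyPath(assemblyPath)
        let entryPoint = asm.EntryPoint
        // An F# entry point takes a string[]; pass an empty argv when one is expected.
        let args : obj[] =
            if entryPoint.GetParameters().Length = 1 then [| box Array.empty<string> |] else [||]
        entryPoint.Invoke(null, args) |> ignore
    finally
        // Collectible contexts can be unloaded so the temporary outputs can be deleted afterwards.
        ctxt.Unload()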