diff --git a/README.md b/README.md
index 531fd2b7b4c..fa0931e80c9 100644
--- a/README.md
+++ b/README.md
@@ -6,13 +6,15 @@ This repository contains useful tools that the Azure SDK team utilizes across th
| Package or Intent | Path | Description | Status |
| ------------------------------ | ------------------------------------------------------- | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
-| Check Enforcer | [Readme](tools/check-enforcer/README.md) | Manage GitHub check-runs in a mono-repo. | Not Yet Enabled |
+| Check Enforcer [1] | [Readme](https://github.com/Azure/azure-sdk-actions/blob/main/docs/check-enforcer.md) | Manage GitHub check-runs in a mono-repo. | Enabled via GitHub actions |
| doc-warden | [Readme](packages/python-packages/doc-warden/README.md) | A tool used to enforce readme standards across Azure SDK Repos. | [![Build Status](https://dev.azure.com/azure-sdk/public/_apis/build/status/108?branchName=main)](https://dev.azure.com/azure-sdk/public/_build/latest?definitionId=108&branchName=main) |
| http-fault-injector | [Readme](tools/http-fault-injector/README.md) | HTTP proxy server for testing HTTP clients during "faults" like "connection closed in middle of body". | [![Build Status](https://dev.azure.com/azure-sdk/internal/_apis/build/status/tools/tools%20-%20http-fault-injector?branchName=main)](https://dev.azure.com/azure-sdk/internal/_build/latest?definitionId=2340&branchName=main) |
| Maven Plugin for Snippets | [Readme](packages/java-packages/snippet-replacer-maven-plugin/README.md) | A Maven plugin that that updates code snippets referenced from javadoc comments. | Not Yet Enabled |
| pixel insertion tool | [Readme](scripts/python/readme_tracking/readme.md) | A tool used to insert the requests for images served by `pixel server`. | Not Yet Enabled |
| pixel-server | [Readme](web/pixel-server/README.md) | A tiny ASP.NET Core site used to serve a pixel and record impressions. | Not Yet Enabled |
+[1] Check Enforcer is located in the `azure-sdk-actions` repo.
+
## Contributing
This project welcomes contributions and suggestions. Most contributions require you to agree to a
diff --git a/doc/common/Cadl-Project-Scripts.md b/doc/common/Cadl-Project-Scripts.md
new file mode 100644
index 00000000000..054e09e0f22
--- /dev/null
+++ b/doc/common/Cadl-Project-Scripts.md
@@ -0,0 +1,146 @@
+# Integrating with cadl sync and generate scripts
+
+There are 2 common scripts provided for each language to be able to generate from within the language
+repo and use the remote cadl definition in the spec repo.
+
+## One time language repo setup
+
+There are 3 things that these two scripts expect to be set up in your language repo before they will run correctly.
+1. Make sure your .gitignore is ignoring the TempCadlFiles
+2. Create a common emitter-package.json for your language
+3. Write the language specific hooks in Language-Settings.ps1
+
+### TempCadlFiles
+
+You should add a new entry in your .gitignore for your repo so that none of these files are accidentally checked in if [cleanup](#cleanup-anchor) is turned off.
+
+```
+# .gitignore file
+TempCadlFiles/
+```
+
+### emitter-package.json
+
+This will be the package.json that gets used when `npm install` is called. This replaces the package.json checked into the spec repo and allows each language to fix the version of their emitter to be the same for all packages in their repo.
+The file should be checked into this location `./eng/emitter-package.json`
+
+Example
+
+```json
+{
+ "main": "dist/src/index.js",
+ "dependencies": {
+ "@azure-tools/cadl-csharp": "0.1.11-beta.20230123.1"
+ }
+}
+```
+
+Note that cadl compile currently requires the "main" line to be there.
+
+### Language-Settings.ps1
+
+There are three methods you can write in your language repo to adjust the behavior of the scripts, one of which is required.
+For each of these replace `${Language}` with the language identifier in your repo. If you don't know what this is you can look at `./eng/scripts/Language-Settings.ps1` in your language repo and you will find other functions that match this pattern that already exist.
+
+#### Get-${Language}-EmitterName *(Required)*
+
+This function simply returns the emitter name string.
+
+Example
+
+```powershell
+function Get-dotnet-EmitterName() {
+ return "@azure-tools/cadl-csharp"
+}
+```
+
+#### Get-${Language}-EmitterPackageJsonPath (Optional)
+
+This function allows you to specify the location and name of the emitter package.json to use. If this is omitted the script will assume the default location listed above `./eng/emitter-package.json`. The path must be absolute.
+
+Example
+
+```powershell
+function Get-dotnet-EmitterPackageJsonPath() {
+ return "D:\SomeOtherLocation\some-other-emitter-package.json"
+}
+```
+
+#### Get-${Language}-EmitterAdditionalOptions (Optional)
+
+This function allows you to append additional `--option` arguments that will be passed into cadl compile. One example of this is the `emitter-output-dir`. For dotnet we want the location of the generated files to be `{projectDir}/src` however in other languages they will have other conventions. This method will take in a fully qualified path to the project directory so you can construct your relative path to that as the output.
+
+Example
+
+```powershell
+function Get-dotnet-EmitterAdditionalOptions([string]$projectDirectory) {
+ return "--option @azure-tools/cadl-csharp.emitter-output-dir=$projectDirectory/src"
+}
+```
+
+## Per project setup
+
+Each project will need to have a configuration file that will tell the scripts where to find the cadl spec.
+
+### cadl-location.yaml
+
+This file should live under the project directory for each service and has the following properties
+
+| Property | Description | IsRequired |
+| --- | --- | --- |
+| directory | The top level directory where the main.cadl for the service lives. This should be relative to the spec repo root such as `specification/cognitiveservices/OpenAI.Inference` | true |
+| additionalDirectories | Sometimes a cadl file will use a relative import that might not be under the main directory. In this case a single `directory` will not be enough to pull down all necessary files. To support this you can specify additional directories as a list to sync so that all needed files are synced. | false: default = null |
+| commit | The commit sha for the version of the cadl files you want to generate off of. This allows us to have idempotence on generation until we opt into pointing at a later version. | true |
+| repo | The repo this spec lives in. This should be either `Azure/azure-rest-api-specs` or `Azure/azure-rest-api-specs-pr`. Note that pr will work locally but not in CI until we add another change to handle token based auth. | true |
+| cleanup | This will remove the TempCadlFiles directory after generation is complete if true otherwise this directory will be left to support local changes to the files to see how different changes would affect the generation. | false: default = true |
+
+Example
+
+```yml
+directory: specification/cognitiveservices/OpenAI.Inference
+additionalDirectories:
+ - specification/cognitiveservices/OpenAI.Authoring
+commit: 14f11cab735354c3e253045f7fbd2f1b9f90f7ca
+repo: Azure/azure-rest-api-specs
+cleanup: false
+```
+
+## Cadl-Project-Sync.ps1
+
+This is the first script that should be called and can be found at `./eng/common/scripts/Cadl-Project-Sync.ps1`. It takes in one parameter which is the root directory of the project which is typically one layer lower than the service directory. As an example for dotnet this is `./sdk/openai/Azure.AI.OpenAI` where `openai` is the service directory and `Azure.AI.OpenAI` is the project directory.
+
+```powershell
+./eng/common/scripts/Cadl-Project-Sync.ps1 ./sdk/openai/Azure.AI.OpenAI
+```
+
+This script will create a `sparse-spec` folder as a sibling to the root of your current git clone. Each project that is generated will get a sub directory inside of this folder named after the project you are generating. It will then automatically filter to only the files in the [directory](#directory-anchor) defined in cadl-location.yaml, and sync to the [commit sha](#commit-anchor) defined in cadl-location.yaml.
+
+As an example if you have your language repo at `D:\git\azure-sdk-for-net` there will be a new directory `D:\git\sparse-spec\Azure.AI.OpenAI` where the sparse spec will live.
+
+This is then copied over to your project directory so that you can make temporary changes if needed. The location will be `./{projectDir}/TempCadlFiles`. This temporary directory will be [cleaned up](#cleanup-anchor) at the end of the generate script if set in the cadl-location.yaml.
+
+## Cadl-Project-Generate.ps1
+
+This is the second script that should be called and can be found at `./eng/common/scripts/Cadl-Project-Generate.ps1`. It takes the exact same parameter as the sync script.
+
+```powershell
+./eng/common/scripts/Cadl-Project-Generate.ps1 ./sdk/openai/Azure.AI.OpenAI
+```
+
+The first thing this does is clean up the npm install that might exist in `./{projectDir}/TempCadlFiles`, followed by replacing the package.json with the language static one.
+
+Once this is done it will run `npm install` followed by `cadl compile` which is the standard way to generate a cadl project.
+
+The exact command that gets run is output stdout to enable debugging if needed.
+
+We currently don't do anything to the cadl-project.yaml that gets pulled in from the spec repo to limit to just your language emitter; instead, we use the filter option on the command line `--emit $emitterName`. This allows you to isolate the generation to only things owned by your language so you can safely add generation dependencies in CI without needing to worry about noisy neighbors.
+
+## Build tool integration
+
+One use case that some languages have is to have their CI regenerate the project and then do a `git diff` to validate that there are no differences. This helps detect if people modify the generated files manually. To support this, it's valuable to have the exact same command to generate a project regardless of whether the individual library is autorest or cadl.
+
+To achieve this each language will have their own idiomatic tool set but whatever that toolset is can check to see if a cadl-location.yaml file exists, and if it does they can call the Cadl-Project-Sync.ps1 and Cadl-Project-Generate.ps1 scripts, otherwise they can call the autorest command they call today for all other libraries.
+
+In dotnet this is achieved by running `dotnet build /t:GenerateCode` regardless of which type of project it is the correct commands get called and it remains consistent and idiomatic to the language. In other languages this could be `npm generate` or `python generate.py` to do the same.
+
+Since the generate script simply is a wrapper for `npm install` and `cadl compile` you can still run those commands directly manually after the sync if you want to instead.
diff --git a/eng/common/TestResources/New-TestResources.ps1 b/eng/common/TestResources/New-TestResources.ps1
index 3e5a60f5585..49616b64b33 100644
--- a/eng/common/TestResources/New-TestResources.ps1
+++ b/eng/common/TestResources/New-TestResources.ps1
@@ -260,7 +260,7 @@ function BuildBicepFile([System.IO.FileSystemInfo] $file)
return $templateFilePath
}
-function BuildDeploymentOutputs([string]$serviceName, [object]$azContext, [object]$deployment) {
+function BuildDeploymentOutputs([string]$serviceName, [object]$azContext, [object]$deployment, [hashtable]$environmentVariables) {
$serviceDirectoryPrefix = BuildServiceDirectoryPrefix $serviceName
# Add default values
$deploymentOutputs = [Ordered]@{
@@ -277,7 +277,7 @@ function BuildDeploymentOutputs([string]$serviceName, [object]$azContext, [objec
"AZURE_SERVICE_DIRECTORY" = $serviceName.ToUpperInvariant();
}
- MergeHashes $EnvironmentVariables $(Get-Variable deploymentOutputs)
+ MergeHashes $environmentVariables $(Get-Variable deploymentOutputs)
foreach ($key in $deployment.Outputs.Keys) {
$variable = $deployment.Outputs[$key]
@@ -293,8 +293,15 @@ function BuildDeploymentOutputs([string]$serviceName, [object]$azContext, [objec
return $deploymentOutputs
}
-function SetDeploymentOutputs([string]$serviceName, [object]$azContext, [object]$deployment, [object]$templateFile) {
- $deploymentOutputs = BuildDeploymentOutputs $serviceName $azContext $deployment
+function SetDeploymentOutputs(
+ [string]$serviceName,
+ [object]$azContext,
+ [object]$deployment,
+ [object]$templateFile,
+ [hashtable]$environmentVariables = @{}
+) {
+ $deploymentEnvironmentVariables = $environmentVariables.Clone()
+ $deploymentOutputs = BuildDeploymentOutputs $serviceName $azContext $deployment $deploymentEnvironmentVariables
if ($OutFile) {
if (!$IsWindows) {
@@ -316,13 +323,20 @@ function SetDeploymentOutputs([string]$serviceName, [object]$azContext, [object]
Log "Persist the following environment variables based on your detected shell ($shell):`n"
}
+ # Write overwrite warnings first, since local execution prints a runnable command to export variables
+ foreach ($key in $deploymentOutputs.Keys) {
+ if ([Environment]::GetEnvironmentVariable($key)) {
+ Write-Warning "Deployment outputs will overwrite pre-existing environment variable '$key'"
+ }
+ }
+
# Marking values as secret by allowed keys below is not sufficient, as there may be outputs set in the ARM/bicep
# file that re-mark those values as secret (since all user-provided deployment outputs are treated as secret by default).
# This variable supports a second check on not marking previously allowed keys/values as secret.
$notSecretValues = @()
foreach ($key in $deploymentOutputs.Keys) {
$value = $deploymentOutputs[$key]
- $EnvironmentVariables[$key] = $value
+ $deploymentEnvironmentVariables[$key] = $value
if ($CI) {
if (ShouldMarkValueAsSecret $serviceName $key $value $notSecretValues) {
@@ -347,7 +361,7 @@ function SetDeploymentOutputs([string]$serviceName, [object]$azContext, [object]
}
}
- return $deploymentOutputs
+ return $deploymentEnvironmentVariables, $deploymentOutputs
}
# Support actions to invoke on exit.
@@ -400,17 +414,13 @@ try {
exit
}
- $UserName = GetUserName
-
- if (!$BaseName) {
- if ($CI) {
- $BaseName = 't' + (New-Guid).ToString('n').Substring(0, 16)
- Log "Generated base name '$BaseName' for CI build"
- } else {
- $BaseName = GetBaseName $UserName (GetServiceLeafDirectoryName $ServiceDirectory)
- Log "BaseName was not set. Using default base name '$BaseName'"
- }
- }
+ $serviceName = GetServiceLeafDirectoryName $ServiceDirectory
+ $BaseName, $ResourceGroupName = GetBaseAndResourceGroupNames `
+ -baseNameDefault $BaseName `
+ -resourceGroupNameDefault $ResourceGroupName `
+ -user (GetUserName) `
+ -serviceDirectoryName $serviceName `
+ -CI $CI
# Make sure pre- and post-scripts are passed formerly required arguments.
$PSBoundParameters['BaseName'] = $BaseName
@@ -546,19 +556,8 @@ try {
$ProvisionerApplicationOid = $sp.Id
}
- $serviceName = GetServiceLeafDirectoryName $ServiceDirectory
-
- $ResourceGroupName = if ($ResourceGroupName) {
- $ResourceGroupName
- } elseif ($CI) {
- # Format the resource group name based on resource group naming recommendations and limitations.
- "rg-{0}-$BaseName" -f ($serviceName -replace '[\.\\\/:]', '-').ToLowerInvariant().Substring(0, [Math]::Min($serviceName.Length, 90 - $BaseName.Length - 4)).Trim('-')
- } else {
- "rg-$BaseName"
- }
-
$tags = @{
- Owners = $UserName
+ Owners = (GetUserName)
ServiceDirectory = $ServiceDirectory
}
@@ -581,7 +580,6 @@ try {
# to determine whether resources should be removed.
Write-Host "Setting variable 'CI_HAS_DEPLOYED_RESOURCES': 'true'"
LogVsoCommand "##vso[task.setvariable variable=CI_HAS_DEPLOYED_RESOURCES;]true"
- $EnvironmentVariables['CI_HAS_DEPLOYED_RESOURCES'] = $true
}
Log "Creating resource group '$ResourceGroupName' in location '$Location'"
@@ -592,8 +590,7 @@ try {
if ($resourceGroup.ProvisioningState -eq 'Succeeded') {
# New-AzResourceGroup would've written an error and stopped the pipeline by default anyway.
Write-Verbose "Successfully created resource group '$($resourceGroup.ResourceGroupName)'"
- }
- elseif (!$resourceGroup) {
+ } elseif (!$resourceGroup) {
if (!$PSCmdlet.ShouldProcess($resourceGroupName)) {
# If the -WhatIf flag was passed, there will be no resource group created. Fake it.
$resourceGroup = [PSCustomObject]@{
@@ -601,7 +598,9 @@ try {
Location = $Location
}
} else {
- Write-Error "Resource group '$ResourceGroupName' already exists." -Category ResourceExists -RecommendedAction "Delete resource group '$ResourceGroupName', or overwrite it when redeploying."
+ Write-Error "Resource group '$ResourceGroupName' already exists." `
+ -Category ResourceExists `
+ -RecommendedAction "Delete resource group '$ResourceGroupName', or overwrite it when redeploying."
}
}
@@ -623,7 +622,10 @@ try {
$displayName = "$($baseName)$suffix.$ResourceType-resources.azure.sdk"
}
- $servicePrincipalWrapper = NewServicePrincipalWrapper -subscription $SubscriptionId -resourceGroup $ResourceGroupName -displayName $DisplayName
+ $servicePrincipalWrapper = NewServicePrincipalWrapper `
+ -subscription $SubscriptionId `
+ -resourceGroup $ResourceGroupName `
+ -displayName $DisplayName
$global:AzureTestPrincipal = $servicePrincipalWrapper
$global:AzureTestSubscription = $SubscriptionId
@@ -650,7 +652,8 @@ try {
}
}
catch {
- Write-Warning "The Object ID of the test application was unable to be queried. You may want to consider passing it explicitly with the 'TestApplicationOid` parameter."
+ Write-Warning ("The Object ID of the test application was unable to be queried. " +
+            "You may want to consider passing it explicitly with the 'TestApplicationOid' parameter.")
throw $_.Exception
}
@@ -667,7 +670,11 @@ try {
# If the role hasn't been explicitly assigned to the resource group and a cached service principal is in use,
# query to see if the grant is needed.
if (!$resourceGroupRoleAssigned -and $AzureTestPrincipal) {
- $roleAssignment = Get-AzRoleAssignment -ObjectId $AzureTestPrincipal.Id -RoleDefinitionName 'Owner' -ResourceGroupName "$ResourceGroupName" -ErrorAction SilentlyContinue
+ $roleAssignment = Get-AzRoleAssignment `
+ -ObjectId $AzureTestPrincipal.Id `
+ -RoleDefinitionName 'Owner' `
+ -ResourceGroupName "$ResourceGroupName" `
+ -ErrorAction SilentlyContinue
$resourceGroupRoleAssigned = ($roleAssignment.RoleDefinitionName -eq 'Owner')
}
@@ -677,12 +684,18 @@ try {
# the explicit grant.
if (!$resourceGroupRoleAssigned) {
Log "Attempting to assigning the 'Owner' role for '$ResourceGroupName' to the Test Application '$TestApplicationId'"
- $principalOwnerAssignment = New-AzRoleAssignment -RoleDefinitionName "Owner" -ApplicationId "$TestApplicationId" -ResourceGroupName "$ResourceGroupName" -ErrorAction SilentlyContinue
+ $principalOwnerAssignment = New-AzRoleAssignment `
+ -RoleDefinitionName "Owner" `
+ -ApplicationId "$TestApplicationId" `
+ -ResourceGroupName "$ResourceGroupName" `
+ -ErrorAction SilentlyContinue
if ($principalOwnerAssignment.RoleDefinitionName -eq 'Owner') {
Write-Verbose "Successfully assigned ownership of '$ResourceGroupName' to the Test Application '$TestApplicationId'"
} else {
- Write-Warning "The 'Owner' role for '$ResourceGroupName' could not be assigned. You may need to manually grant 'Owner' for the resource group to the Test Application '$TestApplicationId' if it does not have subscription-level permissions."
+ Write-Warning ("The 'Owner' role for '$ResourceGroupName' could not be assigned. " +
+ "You may need to manually grant 'Owner' for the resource group to the " +
+ "Test Application '$TestApplicationId' if it does not have subscription-level permissions.")
}
}
@@ -773,7 +786,12 @@ try {
Write-Host "Deployment '$($deployment.DeploymentName)' has CorrelationId '$($deployment.CorrelationId)'"
Write-Host "Successfully deployed template '$($templateFile.jsonFilePath)' to resource group '$($resourceGroup.ResourceGroupName)'"
- $deploymentOutputs = SetDeploymentOutputs $serviceName $context $deployment $templateFile
+ $deploymentEnvironmentVariables, $deploymentOutputs = SetDeploymentOutputs `
+ -serviceName $serviceName `
+ -azContext $context `
+ -deployment $deployment `
+ -templateFile $templateFile `
+ -environmentVariables $EnvironmentVariables
$postDeploymentScript = $templateFile.originalFilePath | Split-Path | Join-Path -ChildPath "$ResourceType-resources-post.ps1"
if (Test-Path $postDeploymentScript) {
@@ -793,7 +811,7 @@ try {
# Suppress output locally
if ($CI) {
- return $EnvironmentVariables
+ return $deploymentEnvironmentVariables
}
<#
diff --git a/eng/common/TestResources/Remove-TestResources.ps1 b/eng/common/TestResources/Remove-TestResources.ps1
index 069f2bf8a63..4e2b52c63a6 100644
--- a/eng/common/TestResources/Remove-TestResources.ps1
+++ b/eng/common/TestResources/Remove-TestResources.ps1
@@ -147,14 +147,13 @@ if (!$ResourceGroupName) {
exit 0
}
} else {
- if (!$BaseName) {
- $UserName = GetUserName
- $BaseName = GetBaseName $UserName $ServiceDirectory
- Log "BaseName was not set. Using default base name '$BaseName'"
- }
-
- # Format the resource group name like in New-TestResources.ps1.
- $ResourceGroupName = "rg-$BaseName"
+ $serviceName = GetServiceLeafDirectoryName $ServiceDirectory
+ $BaseName, $ResourceGroupName = GetBaseAndResourceGroupNames `
+ -baseNameDefault $BaseName `
+ -resourceGroupNameDefault $ResourceGroupName `
+ -user (GetUserName) `
+ -serviceDirectoryName $serviceName `
+ -CI $CI
}
}
diff --git a/eng/common/TestResources/SubConfig-Helpers.ps1 b/eng/common/TestResources/SubConfig-Helpers.ps1
index cc93def6aa8..f87b088c3dd 100644
--- a/eng/common/TestResources/SubConfig-Helpers.ps1
+++ b/eng/common/TestResources/SubConfig-Helpers.ps1
@@ -16,10 +16,45 @@ function GetUserName() {
return $UserName
}
-function GetBaseName([string]$user, [string]$serviceDirectoryName) {
+function GetBaseAndResourceGroupNames(
+ [string]$baseNameDefault,
+ [string]$resourceGroupNameDefault,
+ [string]$user,
+ [string]$serviceDirectoryName,
+ [bool]$CI
+) {
+ if ($CI) {
+ $base = 't' + (New-Guid).ToString('n').Substring(0, 16)
+ # Format the resource group name based on resource group naming recommendations and limitations.
+        $generatedGroup = "rg-{0}-$base" -f ($serviceDirectoryName -replace '[\.\\\/:]', '-').
+ Substring(0, [Math]::Min($serviceDirectoryName.Length, 90 - $base.Length - 4)).
+ Trim('-').
+ ToLowerInvariant()
+ $group = $resourceGroupNameDefault ? $resourceGroupNameDefault : $generatedGroup
+
+ Log "Generated resource base name '$base' and resource group name '$group' for CI build"
+
+ return $base, $group
+ }
+
+ if ($baseNameDefault) {
+ $base = $baseNameDefault.ToLowerInvariant()
+ $group = $resourceGroupNameDefault ? $resourceGroupNameDefault : ("rg-$baseNameDefault".ToLowerInvariant())
+ return $base, $group
+ }
+
# Handle service directories in nested directories, e.g. `data/aztables`
$serviceDirectorySafeName = $serviceDirectoryName -replace '[\./\\]', ''
- return "$user$serviceDirectorySafeName".ToLowerInvariant()
+ # Seed off resource group name if set to avoid name conflicts with deployments where it is not set
+ $seed = $resourceGroupNameDefault ? $resourceGroupNameDefault : "${user}${serviceDirectorySafeName}"
+ $baseNameStream = [IO.MemoryStream]::new([Text.Encoding]::UTF8.GetBytes("$seed"))
+ # Hash to keep resource names short enough to not break naming restrictions (e.g. keyvault name length)
+ $base = 't' + (Get-FileHash -InputStream $baseNameStream -Algorithm SHA256).Hash.Substring(0, 16).ToLowerInvariant()
+ $group = $resourceGroupNameDefault ? $resourceGroupNameDefault : "rg-${user}${serviceDirectorySafeName}".ToLowerInvariant();
+
+ Log "BaseName was not set. Generating resource group name '$group' and resource base name '$base'"
+
+ return $base, $group
}
function ShouldMarkValueAsSecret([string]$serviceName, [string]$key, [string]$value, [array]$allowedValues = @())
diff --git a/eng/common/TestResources/Update-TestResources.ps1 b/eng/common/TestResources/Update-TestResources.ps1
index 7715ec4fcfb..a9983f547a9 100644
--- a/eng/common/TestResources/Update-TestResources.ps1
+++ b/eng/common/TestResources/Update-TestResources.ps1
@@ -69,17 +69,13 @@ $exitActions = @({
}
})
-# Make sure $ResourceGroupName is set.
-if (!$ResourceGroupName) {
- # Make sure $BaseName is set.
- if (!$BaseName) {
- $UserName = GetUserName
- $BaseName = GetBaseName $UserName $ServiceDirectory
- Log "BaseName was not set. Using default base name '$BaseName'"
- }
-
- $ResourceGroupName = "rg-$BaseName"
-}
+$serviceName = GetServiceLeafDirectoryName $ServiceDirectory
+$BaseName, $ResourceGroupName = GetBaseAndResourceGroupNames `
+ -baseNameDefault $BaseName `
+ -resourceGroupNameDefault $ResourceGroupName `
+ -user (GetUserName) `
+ -serviceDirectoryName $serviceName `
+ -CI $false
# This script is intended for interactive users. Make sure they are logged in or fail.
$context = Get-AzContext
diff --git a/eng/common/pipelines/templates/steps/cosmos-emulator.yml b/eng/common/pipelines/templates/steps/cosmos-emulator.yml
index f1faae362f4..3e21c76b339 100644
--- a/eng/common/pipelines/templates/steps/cosmos-emulator.yml
+++ b/eng/common/pipelines/templates/steps/cosmos-emulator.yml
@@ -1,4 +1,5 @@
parameters:
+ EmulatorInstallPath: "$(Agent.HomeDirectory)/../../Program Files/Azure Cosmos DB Emulator/Microsoft.Azure.Cosmos.Emulator.exe"
EmulatorMsiUrl: "https://aka.ms/cosmosdb-emulator"
StartParameters: ''
@@ -9,16 +10,7 @@ steps:
arguments: >
-EmulatorMsiUrl "${{ parameters.EmulatorMsiUrl }}"
-StartParameters "${{ parameters.StartParameters }}"
- -Stage "Install"
- pwsh: true
- displayName: Install Public Cosmos DB Emulator
-
- - task: Powershell@2
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/scripts/Cosmos-Emulator.ps1
- arguments: >
- -EmulatorMsiUrl "${{ parameters.EmulatorMsiUrl }}"
- -StartParameters "${{ parameters.StartParameters }}"
+ -Emulator "${{ parameters.EmulatorInstallPath }}"
-Stage "Launch"
pwsh: true
displayName: Launch Public Cosmos DB Emulator
diff --git a/eng/common/scripts/Cadl-Project-Generate.ps1 b/eng/common/scripts/Cadl-Project-Generate.ps1
new file mode 100644
index 00000000000..d6adc59cefe
--- /dev/null
+++ b/eng/common/scripts/Cadl-Project-Generate.ps1
@@ -0,0 +1,93 @@
+# For details see https://github.com/Azure/azure-sdk-tools/blob/main/doc/common/Cadl-Project-Scripts.md
+
+[CmdletBinding()]
+param (
+ [Parameter(Position=0)]
+ [ValidateNotNullOrEmpty()]
+ [string] $ProjectDirectory
+)
+
+$ErrorActionPreference = "Stop"
+. $PSScriptRoot/Helpers/PSModule-Helpers.ps1
+. $PSScriptRoot/common.ps1
+Install-ModuleIfNotInstalled "powershell-yaml" "0.4.1" | Import-Module
+
+function NpmInstallForProject([string]$workingDirectory) {
+ Push-Location $workingDirectory
+ try {
+        $currentDir = Resolve-Path "."
+        Write-Host "Generating from $currentDir"
+
+ if (Test-Path "package.json") {
+ Remove-Item -Path "package.json" -Force
+ }
+
+ if (Test-Path ".npmrc") {
+ Remove-Item -Path ".npmrc" -Force
+ }
+
+ if (Test-Path "node_modules") {
+ Remove-Item -Path "node_modules" -Force -Recurse
+ }
+
+ if (Test-Path "package-lock.json") {
+ Remove-Item -Path "package-lock.json" -Force
+ }
+
+ #default to root/eng/emitter-package.json but you can override by writing
+ #Get-${Language}-EmitterPackageJsonPath in your Language-Settings.ps1
+ $replacementPackageJson = "$PSScriptRoot/../../emitter-package.json"
+ if (Test-Path "Function:$GetEmitterPackageJsonPathFn") {
+ $replacementPackageJson = &$GetEmitterPackageJsonPathFn
+ }
+
+ Write-Host("Copying package.json from $replacementPackageJson")
+ Copy-Item -Path $replacementPackageJson -Destination "package.json" -Force
+        npm install --no-package-lock
+ if ($LASTEXITCODE) { exit $LASTEXITCODE }
+ }
+ finally {
+ Pop-Location
+ }
+}
+
+$resolvedProjectDirectory = Resolve-Path $ProjectDirectory
+$emitterName = &$GetEmitterNameFn
+$cadlConfigurationFile = Resolve-Path "$ProjectDirectory/cadl-location.yaml"
+
+Write-Host "Reading configuration from $cadlConfigurationFile"
+$configuration = Get-Content -Path $cadlConfigurationFile -Raw | ConvertFrom-Yaml
+
+$specSubDirectory = $configuration["directory"]
+$innerFolder = Split-Path $specSubDirectory -Leaf
+
+$tempFolder = "$ProjectDirectory/TempCadlFiles"
+$npmWorkingDir = Resolve-Path $tempFolder/$innerFolder
+$mainCadlFile = If (Test-Path "$npmWorkingDir/client.cadl") { Resolve-Path "$npmWorkingDir/client.cadl" } Else { Resolve-Path "$npmWorkingDir/main.cadl"}
+
+try {
+ Push-Location $npmWorkingDir
+ NpmInstallForProject $npmWorkingDir
+
+ if ($LASTEXITCODE) { exit $LASTEXITCODE }
+
+ if (Test-Path "Function:$GetEmitterAdditionalOptionsFn") {
+ $emitterAdditionalOptions = &$GetEmitterAdditionalOptionsFn $resolvedProjectDirectory
+ if ($emitterAdditionalOptions.Length -gt 0) {
+ $emitterAdditionalOptions = " $emitterAdditionalOptions"
+ }
+ }
+ $cadlCompileCommand = "npx cadl compile $mainCadlFile --emit $emitterName$emitterAdditionalOptions"
+ Write-Host($cadlCompileCommand)
+ Invoke-Expression $cadlCompileCommand
+
+ if ($LASTEXITCODE) { exit $LASTEXITCODE }
+}
+finally {
+ Pop-Location
+}
+
+$shouldCleanUp = $configuration["cleanup"] ?? $true
+if ($shouldCleanUp) {
+ Remove-Item $tempFolder -Recurse -Force
+}
\ No newline at end of file
diff --git a/eng/common/scripts/Cadl-Project-Sync.ps1 b/eng/common/scripts/Cadl-Project-Sync.ps1
new file mode 100644
index 00000000000..79527b5b11c
--- /dev/null
+++ b/eng/common/scripts/Cadl-Project-Sync.ps1
@@ -0,0 +1,127 @@
+# For details see https://github.com/Azure/azure-sdk-tools/blob/main/doc/common/Cadl-Project-Scripts.md
+
+[CmdletBinding()]
+param (
+ [Parameter(Position=0)]
+ [ValidateNotNullOrEmpty()]
+ [string] $ProjectDirectory
+)
+
+$ErrorActionPreference = "Stop"
+. $PSScriptRoot/Helpers/PSModule-Helpers.ps1
+Install-ModuleIfNotInstalled "powershell-yaml" "0.4.1" | Import-Module
+$sparseCheckoutFile = ".git/info/sparse-checkout"
+
+function AddSparseCheckoutPath([string]$subDirectory) {
+ if (!(Test-Path $sparseCheckoutFile) -or !((Get-Content $sparseCheckoutFile).Contains($subDirectory))) {
+ Write-Output $subDirectory >> .git/info/sparse-checkout
+ }
+}
+
+function CopySpecToProjectIfNeeded([string]$specCloneRoot, [string]$mainSpecDir, [string]$dest, [string[]]$specAdditionalSubDirectories) {
+ $source = "$specCloneRoot/$mainSpecDir"
+ Copy-Item -Path $source -Destination $dest -Recurse -Force
+ Write-Host "Copying spec from $source to $dest"
+
+ foreach ($additionalDir in $specAdditionalSubDirectories) {
+ $source = "$specCloneRoot/$additionalDir"
+ Write-Host "Copying spec from $source to $dest"
+ Copy-Item -Path $source -Destination $dest -Recurse -Force
+ }
+}
+
+function UpdateSparseCheckoutFile([string]$mainSpecDir, [string[]]$specAdditionalSubDirectories) {
+ AddSparseCheckoutPath $mainSpecDir
+ foreach ($subDir in $specAdditionalSubDirectories) {
+ AddSparseCheckoutPath $subDir
+ }
+}
+
+function GetGitRemoteValue([string]$repo) {
+ Push-Location $ProjectDirectory
+ $result = ""
+ try {
+ $gitRemotes = (git remote -v)
+ foreach ($remote in $gitRemotes) {
+ if ($remote.StartsWith("origin")) {
+            if ($remote -match 'https://github.com/\S+[\.git]') {
+                $result = "https://github.com/$repo.git"
+                break
+            } elseif ($remote -match "git@github.com:\S+[\.git]"){
+                $result = "git@github.com:$repo.git"
+ break
+ } else {
+ throw "Unknown git remote format found: $remote"
+ }
+ }
+ }
+ }
+ finally {
+ Pop-Location
+ }
+
+ return $result
+}
+
+function InitializeSparseGitClone([string]$repo) {
+ git clone --no-checkout --filter=tree:0 $repo .
+ if ($LASTEXITCODE) { exit $LASTEXITCODE }
+ git sparse-checkout init
+ if ($LASTEXITCODE) { exit $LASTEXITCODE }
+ Remove-Item $sparseCheckoutFile -Force
+}
+
+function GetSpecCloneDir([string]$projectName) {
+ Push-Location $ProjectDirectory
+ try {
+ $root = git rev-parse --show-toplevel
+ }
+ finally {
+ Pop-Location
+ }
+
+ $sparseSpecCloneDir = "$root/../sparse-spec/$projectName"
+ New-Item $sparseSpecCloneDir -Type Directory -Force | Out-Null
+ $createResult = Resolve-Path $sparseSpecCloneDir
+ return $createResult
+}
+
+$cadlConfigurationFile = Resolve-Path "$ProjectDirectory/cadl-location.yaml"
+Write-Host "Reading configuration from $cadlConfigurationFile"
+$configuration = Get-Content -Path $cadlConfigurationFile -Raw | ConvertFrom-Yaml
+
+$pieces = $cadlConfigurationFile.Path.Replace("\","/").Split("/")
+$projectName = $pieces[$pieces.Count - 2]
+
+$specSubDirectory = $configuration["directory"]
+
+if ( $configuration["repo"] -and $configuration["commit"]) {
+ $specCloneDir = GetSpecCloneDir $projectName
+ $gitRemoteValue = GetGitRemoteValue $configuration["repo"]
+
+ Write-Host "Setting up sparse clone for $projectName at $specCloneDir"
+
+ Push-Location $specCloneDir.Path
+ try {
+ if (!(Test-Path ".git")) {
+ InitializeSparseGitClone $gitRemoteValue
+ UpdateSparseCheckoutFile $specSubDirectory $configuration["additionalDirectories"]
+ }
+ git checkout $configuration["commit"]
+ if ($LASTEXITCODE) { exit $LASTEXITCODE }
+ }
+ finally {
+ Pop-Location
+ }
+} elseif ( $configuration["spec-root-dir"] ) {
+ $specCloneDir = $configuration["spec-root-dir"]
+}
+
+
+$tempCadlDir = "$ProjectDirectory/TempCadlFiles"
+New-Item $tempCadlDir -Type Directory -Force | Out-Null
+CopySpecToProjectIfNeeded `
+ -specCloneRoot $specCloneDir `
+ -mainSpecDir $specSubDirectory `
+ -dest $tempCadlDir `
+ -specAdditionalSubDirectories $configuration["additionalDirectories"]
diff --git a/eng/common/scripts/Cosmos-Emulator.ps1 b/eng/common/scripts/Cosmos-Emulator.ps1
index 3000e321259..853938aa7e5 100644
--- a/eng/common/scripts/Cosmos-Emulator.ps1
+++ b/eng/common/scripts/Cosmos-Emulator.ps1
@@ -9,7 +9,10 @@ This script downloads, installs and launches cosmosdb-emulator.
Uri for downloading the cosmosdb-emulator
.PARAMETER StartParameters
-Parameter with which to launch the cosmosdb-emulator
+Parameter with which to launch the cosmosdb-emulator
+
+.PARAMETER Emulator
+Exact path to Microsoft.Azure.Cosmos.Emulator.exe
.PARAMETER Stage
Determines what part of the script to run. Has to be either Install or Launch
@@ -18,6 +21,7 @@ Determines what part of the script to run. Has to be either Install or Launch
Param (
[string] $EmulatorMsiUrl = "https://aka.ms/cosmosdb-emulator",
[string] $StartParameters,
+ [string] $Emulator,
[Parameter(Mandatory=$True)]
[ValidateSet('Install', 'Launch')]
[string] $Stage
@@ -26,7 +30,11 @@ Param (
$targetDir = Join-Path $Env:Temp AzureCosmosEmulator
$logFile = Join-Path $Env:Temp log.txt
$productName = "Azure Cosmos DB Emulator"
-$emulator = (Join-Path $targetDir (Join-Path $productName "Microsoft.Azure.Cosmos.Emulator.exe"))
+
+if ([string]::IsNullOrEmpty($Emulator))
+{
+ $Emulator = (Join-Path $targetDir (Join-Path $productName "Microsoft.Azure.Cosmos.Emulator.exe"))
+}
if ($Stage -eq "Install")
{
@@ -58,19 +66,19 @@ if ($Stage -eq "Install")
}
Write-Host "Getting Cosmos DB Emulator Version"
- $fileVersion = Get-ChildItem $emulator
- Write-Host $emulator $fileVersion.VersionInfo
+ $fileVersion = Get-ChildItem $Emulator
+ Write-Host $Emulator $fileVersion.VersionInfo
}
if ($Stage -eq "Launch")
{
Write-Host "Launching Cosmos DB Emulator"
- if (!(Test-Path $emulator)) {
- Write-Error "The emulator is not installed where expected at '$emulator'"
+ if (!(Test-Path $Emulator)) {
+ Write-Error "The emulator is not installed where expected at '$Emulator'"
return
}
- $process = Start-Process $emulator -ArgumentList "/getstatus" -PassThru -Wait
+ $process = Start-Process $Emulator -ArgumentList "/getstatus" -PassThru -Wait
switch ($process.ExitCode) {
1 {
Write-Host "The emulator is already starting"
@@ -97,8 +105,8 @@ if ($Stage -eq "Launch")
$argumentList = "/noexplorer /noui /enablepreview /disableratelimiting /enableaadauthentication"
}
- Write-Host "Starting emulator process: $emulator $argumentList"
- $process = Start-Process $emulator -ArgumentList $argumentList -ErrorAction Stop -PassThru
+ Write-Host "Starting emulator process: $Emulator $argumentList"
+ $process = Start-Process $Emulator -ArgumentList $argumentList -ErrorAction Stop -PassThru
Write-Host "Emulator process started: $($process.Name), $($process.FileVersion)"
$Timeout = 600
@@ -117,7 +125,7 @@ if ($Stage -eq "Launch")
}
do {
- $process = Start-Process $emulator -ArgumentList "/getstatus" -PassThru -Wait
+ $process = Start-Process $Emulator -ArgumentList "/getstatus" -PassThru -Wait
switch ($process.ExitCode) {
1 {
Write-Host "The emulator is starting"
diff --git a/eng/common/scripts/common.ps1 b/eng/common/scripts/common.ps1
index c78b1cfc2f1..8f9c707ee29 100644
--- a/eng/common/scripts/common.ps1
+++ b/eng/common/scripts/common.ps1
@@ -57,3 +57,6 @@ $GetDocsMsTocChildrenForManagementPackagesFn = "Get-${Language}-DocsMsTocChildre
$UpdateDocsMsTocFn = "Get-${Language}-UpdatedDocsMsToc"
$GetPackageLevelReadmeFn = "Get-${Language}-PackageLevelReadme"
$GetRepositoryLinkFn = "Get-${Language}-RepositoryLink"
+$GetEmitterAdditionalOptionsFn = "Get-${Language}-EmitterAdditionalOptions"
+$GetEmitterNameFn = "Get-${Language}-EmitterName"
+$GetEmitterPackageJsonPathFn = "Get-${Language}-EmitterPackageJsonPath"
diff --git a/eng/common/testproxy/publish-proxy-logs.yml b/eng/common/testproxy/publish-proxy-logs.yml
new file mode 100644
index 00000000000..543527a4437
--- /dev/null
+++ b/eng/common/testproxy/publish-proxy-logs.yml
@@ -0,0 +1,16 @@
+parameters:
+ rootFolder: '$(Build.SourcesDirectory)'
+
+steps:
+ - pwsh: |
+ Copy-Item -Path "${{ parameters.rootFolder }}/test-proxy.log" -Destination "${{ parameters.rootFolder }}/proxy.log"
+ displayName: Copy Log File
+
+ - template: ../pipelines/templates/steps/publish-artifact.yml
+ parameters:
+ ArtifactName: "$(System.JobName)-proxy-logs"
+ ArtifactPath: "${{ parameters.rootFolder }}/proxy.log"
+
+ - pwsh: |
+ Remove-Item -Force ${{ parameters.rootFolder }}/proxy.log
+ displayName: Cleanup Copied Log File
diff --git a/eng/common/testproxy/test-proxy-tool.yml b/eng/common/testproxy/test-proxy-tool.yml
index 293848da122..ae9d6905153 100644
--- a/eng/common/testproxy/test-proxy-tool.yml
+++ b/eng/common/testproxy/test-proxy-tool.yml
@@ -42,13 +42,13 @@ steps:
- pwsh: |
Start-Process $(Build.BinariesDirectory)/test-proxy/test-proxy.exe `
-ArgumentList "--storage-location ${{ parameters.rootFolder }}" `
- -NoNewWindow -PassThru -RedirectStandardOutput ${{ parameters.templateRoot }}/test-proxy.log
+ -NoNewWindow -PassThru -RedirectStandardOutput ${{ parameters.rootFolder }}/test-proxy.log
displayName: 'Run the testproxy - windows'
condition: and(succeeded(), eq(variables['Agent.OS'],'Windows_NT'), ${{ parameters.condition }})
# nohup does NOT continue beyond the current session if you use it within powershell
- bash: |
- nohup $(Build.BinariesDirectory)/test-proxy/test-proxy > ${{ parameters.templateRoot }}/test-proxy.log &
+ nohup $(Build.BinariesDirectory)/test-proxy/test-proxy > ${{ parameters.rootFolder }}/test-proxy.log &
displayName: "Run the testproxy - linux/mac"
condition: and(succeeded(), ne(variables['Agent.OS'],'Windows_NT'), ${{ parameters.condition }})
workingDirectory: "${{ parameters.rootFolder }}"
diff --git a/eng/common/testproxy/transition-scripts/README.md b/eng/common/testproxy/transition-scripts/README.md
new file mode 100644
index 00000000000..862d22ed6de
--- /dev/null
+++ b/eng/common/testproxy/transition-scripts/README.md
@@ -0,0 +1,131 @@
+# Transitioning recording assets from language repositories into the assets repository
+
+## Setting some context
+
+The azure-sdk monorepos are growing quickly due to the presence of recordings. Due to this, the engineering system team has been tasked with providing a mechanism that allows recordings to live _elsewhere_. The actual implementation of this goal is already present within the `test-proxy` tool, and this document reflects how to TRANSITION to storing recordings elsewhere!
+
+The script `generate-assets-json.ps1` will execute the initial migration of your recordings from within a language repo to the [assets repo](https://github.com/Azure/azure-sdk-assets) as well as creating the assets.json file for those assets.
+
+The script is [generate-assets-json.ps1](https://github.com/Azure/azure-sdk-tools/blob/main/eng/common/testproxy/transition-scripts/generate-assets-json.ps1)
+
+### Download the transition script locally
+
+```powershell
+Invoke-WebRequest -OutFile "generate-assets-json.ps1" https://raw.githubusercontent.com/Azure/azure-sdk-tools/main/eng/common/testproxy/transition-scripts/generate-assets-json.ps1
+```
+
+```bash
+wget https://raw.githubusercontent.com/Azure/azure-sdk-tools/main/eng/common/testproxy/transition-scripts/generate-assets-json.ps1 -O generate-assets-json.ps1
+```
+
+## Setup
+
+Before running the script, understand that **only services that have migrated to use the `test-proxy` as their record/playback solution can store recordings into the external assets repository.** The test-proxy itself contains the code for `restoring`/`push`ing recordings, so if it is NOT being used for record/playback, that work must be completed before recordings can be moved.
+
+Running the script requires these base requirements.
+
+- [x] The targeted library is already migrated to use the test-proxy.
+- [x] Git version `>2.25.0` needs to be on the machine and in the path. Git is used by the script and test-proxy.
+- [x] [Powershell Core](https://learn.microsoft.com/powershell/scripting/install/installing-powershell?view=powershell-7.2) at least version 7.
+- [x] Ensure global git config settings for `user.name` and `user.email` are updated. [Reference](https://git-scm.com/book/en/v2/Getting-Started-First-Time-Git-Setup)
+ - Override with environment variables `GIT_COMMIT_EMAIL` and `GIT_COMMIT_OWNER`. If either of these are set, they will override the default values pulled from `git config --global`.
+
+Once the above requirements are met, developers are welcome to choose one of the following paths.
+
+### `test-proxy` dotnet tool installed and called directly
+
+Provide `TestProxyExe` argument of `test-proxy` or leave it **blank**. This is the default use-case of this transition script.
+
+- [x] Test-proxy needs to be on the machine and in the path. Instructions for that are [here](https://github.com/Azure/azure-sdk-tools/blob/main/tools/test-proxy/Azure.Sdk.Tools.TestProxy/README.md#installation).
+
+The newly installed test-proxy tool will be used during the recording migration portion of this script.
+
+### `docker` or `podman` invocation
+
+To utilize this methodology, the user must set input argument `TestProxyExe` to `docker` or `podman`.
+
+Other requirements:
+
+- [x] Install [docker](https://docs.docker.com/engine/install/) or [podman](https://podman.io/getting-started/installation.html)
+- [x] Set the environment variable `GIT_TOKEN` to a valid token representing YOUR user
+
+## Permissions
+
+Check your github group membership. If you are part of the group `azure-sdk-write` directly or through a sub-team, you have the necessary permissions to create tags in the assets repository.
+
+You will not be able to clean them up however. There exists [planned work](https://github.com/Azure/azure-sdk-tools/issues/4298) to clean up unused assets repo tags. Erroneously pushed tags will be auto cleaned.
+
+## Nomenclature
+
+- `language` repo - An individual language repository eg. azure-sdk-for-python or azure-sdk-for-net etc.
+- `assets` repo - The repository where assets are being moved to.
+
+The `test-proxy` tool is integrated with the ability to automatically restore these assets. This process is kick-started by the presence of an `assets.json` alongside a dev's actual code. This means that while assets will be cloned down externally, the _map_ to those assets will be stored alongside the tests. Normally, it is recommended to create an `assets.json` under the path `sdk/<service>`. However, more granular storage is also possible.
+
+Service/Package-Level examples:
+
+- `sdk/storage/assets.json`
+- `sdk/storage/azure-storage-file-datalake/assets.json`
+
+The location of the actual test code is referred to as the `language repo`.
+
+The location of the automatically restored assets is colloquially referred to as the `assets repo`. There is an individual `assets repo` cloned for **each `assets.json` in the language repo.**
+
+## Running the script
+
+[generate-assets-json.ps1](https://github.com/Azure/azure-sdk-tools/blob/main/eng/common/testproxy/transition-scripts/generate-assets-json.ps1) is a standalone powershell script with no supporting script requirements. The easiest way to run the script would be to use a one-liner [defined above](#download-the-transition-script-locally) to grab the file directly. **Please ensure you have the newest version of this script before continuing!**
+
+```powershell
+# if downloading the file singly, cd to the directory containing generate-assets-json.ps1
+cd "<path-to-language-repo>/sdk/<service>"
+<path-to-script>/generate-assets-json.ps1
+```
+
+The script needs to be executed inside an `sdk/<service>` directory or deeper, and from within an up-to-date language repository. A good rule here would be to look at where the ci.yml is for a service directory. In the case where each library for a given service directory has its own pipeline at the `sdk/<service>/<library>` level, it is recommended that the assets.json is created there. If the `ci.yml` exists deeper than the `sdk/<service>/` level, then it is recommended to run the script from that directory.
+
+```powershell
+# calling transition script against tool, given local clones of azure-sdk-for-java and azure-sdk-tools
+cd c:/src/azure-sdk-for-java/sdk/attestation
+<path-to-script>/generate-assets-json.ps1 -InitialPush
+```
+
+```powershell
+# calling transition script against docker, given local clones of azure-sdk-for-java and azure-sdk-tools
+$env:GIT_TOKEN="my git token"
+cd c:/src/azure-sdk-for-java/sdk/attestation
+<path-to-script>/generate-assets-json.ps1 -TestProxyExe "docker" -InitialPush
+```
+
+After running a script, executing a `git status` from within the language repo, where the script was invoked from, will reflect two primary results:
+
+- A new `assets.json` present in the directory from which they invoked the transition script.
+- A **bunch** of deleted files from where their recordings _were_ before they were pushed to the assets repo.
+
+Running the script without the `-InitialPush` option will just create the assets.json with an empty tag. No data movement.
+
+### What's the script doing behind the scenes?
+
+Given the previous example of `sdk/attestation` transition script invocation, users should see the following:
+
+- Creation of the assets.json file in the `sdk/attestation` directory.
+ - If `-InitialPush` has not been specified, the script stops here and exits.
+- test-proxy's CLI restore is called on the current assets.json. Since there's nothing there, it'll just initialize an empty assets directory under the `.assets` directory under repo root.
+- The recordings are moved from their initial directories within the language repo into a temp directory that was created in the previous step.
+ - The relative paths from root are preserved.
+ - For example, the recordings for `C:/src/azure-sdk-for-python/sdk/tables` live in the `azure-data-tables/tests/recordings` subdirectory and in the target repository they'll live in `python/sdk/tables/azure-data-tables/tests/recordings`. All the azure-sdk supported languages will leverage [Azure/azure-sdk-assets](https://github.com/Azure/azure-sdk-assets), so adding a prefix to the output path `python` ensures that these recordings can live alongside others in the assets repo.
+- Call `test-proxy push` on the assets.json created in the first step. The push will happen automatically and not require a manual PR.
+ - On completion of the push, the newly created tag will be stamped into the assets.json.
+
+At this point the script is complete. The assets.json and deleted recording files will need to be pushed into the language repository as a manual PR.
+
+#### Why does the script analyze the remotes to compute the language?
+
+This is necessary because the language is used in several places.
+
+1. The AssetsRepoPrefixPath in assets.json is set to the language.
+2. The TagPrefix is set to the `<language>/<service>` or `<language>/<service>/<package>` etc.
+3. The language is also used to determine what the [recording directories within a repository are named](https://github.com/Azure/azure-sdk-tools/blob/main/eng/common/testproxy/transition-scripts/generate-assets-json.ps1#L47).
+
+## A final note about the initial push
+
+If a directory with several thousand recordings is being migrated, the move and the initial push can take several minutes. For example, java storage recordings were used as a stress test. There are 4,693 files, with a combined size of 666 MB, and the initial push took about 7 minutes. This is a one time cost as the files do not exist yet within the assets repository. Subsequent pushes should have dramatically reduced push time.
diff --git a/tools/test-proxy/scripts/transition-scripts/generate-assets-json.ps1 b/eng/common/testproxy/transition-scripts/generate-assets-json.ps1
similarity index 100%
rename from tools/test-proxy/scripts/transition-scripts/generate-assets-json.ps1
rename to eng/common/testproxy/transition-scripts/generate-assets-json.ps1
diff --git a/eng/dotnet-executable-entitlements.plist b/eng/dotnet-executable-entitlements.plist
new file mode 100644
index 00000000000..d0fc6e5d3aa
--- /dev/null
+++ b/eng/dotnet-executable-entitlements.plist
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+    <key>com.apple.security.cs.allow-dyld-environment-variables</key><true/>
+    <key>com.apple.security.cs.allow-jit</key><true/>
+    <key>com.apple.security.cs.debugger</key><true/>
+    <key>com.apple.security.cs.disable-library-validation</key><true/>
+    <key>com.apple.security.get-task-allow</key><true/>
+</dict>
+</plist>
+
\ No newline at end of file
diff --git a/eng/label-to-project.config b/eng/label-to-project.config
index 79c6c468486..92e02c4f892 100644
--- a/eng/label-to-project.config
+++ b/eng/label-to-project.config
@@ -7,15 +7,15 @@
# In order to avoid a graphql node id lookup on every label action we should gather
# that once and provide the value in this configuration.
# To get the ProjectId goto https://docs.github.com/graphql/overview/explorer and run query
-# "{ organization(login: "Azure") { projectNext(number: <ProjectNumber>) { id } } }"
+# "{ organization(login: "Azure") { projectV2(number: <ProjectNumber>) { id } } }"
# replacing <ProjectNumber> with your project number to get the project node id.
#
# You can also run the Add-IssuesWithLabelToProject.ps1 script to add the initial
# set of issues the project as well as to get the ProjectId for your project.
"Label", "Project", "ProjectId"
-"Central-EngSys", "https://github.com/orgs/Azure/projects/162", "PN_kwDOAGhwUs4ABj5U"
-"ApiView", "https://github.com/orgs/Azure/projects/197", "PN_kwDOAGhwUs4ACf4D"
-"Swagger Tools", "https://github.com/orgs/Azure/projects/198", "PN_kwDOAGhwUs4ACgMI"
-"Scheduling tool", "https://github.com/orgs/Azure/projects/231", "PN_kwDOAGhwUs4AD8BB"
-"Scheduling tool Admin", "https://github.com/orgs/Azure/projects/231", "PN_kwDOAGhwUs4AD8BB"
-"Engagement Experience", "https://github.com/orgs/Azure/projects/318", "PN_kwDOAGhwUs4AFzVw"
+"Central-EngSys", "https://github.com/orgs/Azure/projects/162", "PVT_kwDOAGhwUs4ABj5U"
+"ApiView", "https://github.com/orgs/Azure/projects/197", "PVT_kwDOAGhwUs4ACf4D"
+"Swagger Tools", "https://github.com/orgs/Azure/projects/198", "PVT_kwDOAGhwUs4ACgMI"
+"Scheduling tool", "https://github.com/orgs/Azure/projects/231", "PVT_kwDOAGhwUs4AD8BB"
+"Scheduling tool Admin", "https://github.com/orgs/Azure/projects/231", "PVT_kwDOAGhwUs4AD8BB"
+"Engagement Experience", "https://github.com/orgs/Azure/projects/318", "PVT_kwDOAGhwUs4AFzVw"
diff --git a/eng/pipelines/apiview-review-gen-swagger.yml b/eng/pipelines/apiview-review-gen-swagger.yml
index 431e10f8f7a..4c3b02587f3 100644
--- a/eng/pipelines/apiview-review-gen-swagger.yml
+++ b/eng/pipelines/apiview-review-gen-swagger.yml
@@ -27,7 +27,7 @@ jobs:
- script: >
dotnet tool install
Azure.Sdk.Tools.SwaggerApiParser
- --version 1.0.0-dev.0.4
+ --version 1.0.5-dev.20230131.3
--add-source https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-net/nuget/v3/index.json
--tool-path $(SwaggerParserInstallPath)
workingDirectory: $(SwaggerParserInstallPath)
diff --git a/eng/pipelines/templates/jobs/azuresdkpartnerdrops-to-nugetfeed.yml b/eng/pipelines/templates/jobs/azuresdkpartnerdrops-to-nugetfeed.yml
index 13f5835c3ab..423bdec44ea 100644
--- a/eng/pipelines/templates/jobs/azuresdkpartnerdrops-to-nugetfeed.yml
+++ b/eng/pipelines/templates/jobs/azuresdkpartnerdrops-to-nugetfeed.yml
@@ -4,7 +4,7 @@ resources:
- repository: azure-sdk-build-tools
type: git
name: internal/azure-sdk-build-tools
- ref: refs/tags/azure-sdk-build-tools_20230112.1
+ ref: refs/tags/azure-sdk-build-tools_20230201.1
parameters:
- name: BuildToolsRepoPath
diff --git a/eng/pipelines/templates/scripts/assemble-dotnet-standalone-exe.ps1 b/eng/pipelines/templates/scripts/assemble-dotnet-standalone-exe.ps1
new file mode 100644
index 00000000000..6de6cb648a1
--- /dev/null
+++ b/eng/pipelines/templates/scripts/assemble-dotnet-standalone-exe.ps1
@@ -0,0 +1,95 @@
+<#
+.SYNOPSIS
+Publishes a standalone dotnet executable to an artifact staging directory.
+
+.DESCRIPTION
+Assembles a standalone executable and places it within the given staging directory. This script takes care of any additional minutae that is required to
+enable a usable binary down the line after signing or notarization.
+
+.PARAMETER Rid
+The target platform. Takes the form of "osx-x64", "win-arm64", "linux-x64", etc. A full list is available here: https://learn.microsoft.com/en-us/dotnet/core/rid-catalog
+
+.PARAMETER ArtifactStagingDirectory
+The root directory which will receive the compressed standalone executable.
+
+.PARAMETER Target
+The targeted folder that should be built and assembled into a standalone executable.
+
+.PARAMETER Framework
+The targeted .NET framework. Defaults to "net6.0."
+
+#>
+param(
+ [Parameter(mandatory=$true)]
+ [string] $Rid,
+ [Parameter(mandatory=$true)]
+ [string] $Target,
+ [Parameter(mandatory=$true)]
+ [string] $ArtifactStagingDirectory,
+ [Parameter(mandatory=$true)]
+ [string] $AssemblyName,
+ [Parameter(mandatory=$false)]
+ [string] $Framework = "net6.0"
+)
+
+# resolves to <ArtifactStagingDirectory>/win-x64
+$destinationArtifactFolder = Join-Path $ArtifactStagingDirectory $Rid
+
+# resolves to <ArtifactStagingDirectory>/win-x64/test-proxy-standalone-win-x64 (.zip or .tar.gz will be added as appropriate for platform)
+$destinationPathSegment = Join-Path $destinationArtifactFolder "$(Split-Path -Leaf "$Target")-standalone-$Rid"
+
+# resolves to tools/test-proxy/win-x64
+$outputPath = Join-Path $Target $Rid
+
+# ensure the destination artifact directory exists
+if (!(Test-Path $destinationArtifactFolder)){
+ New-Item -Force -Path $destinationArtifactFolder -ItemType directory
+}
+
+Write-Host "dotnet publish -f $Framework -c Release -r $Rid -p:PublishSingleFile=true --self-contained --output $outputPath $Target"
+dotnet publish -f $Framework -c Release -r $Rid -p:PublishSingleFile=true --self-contained --output $outputPath $Target
+
+if ($LASTEXITCODE -ne 0) {
+ Write-Error "dotnet publish failed with exit code $LASTEXITCODE."
+ exit $LASTEXITCODE
+}
+
+# produce a tar.gz only for linux
+if ("$($Rid)".Contains("linux")){
+ # tar on powershell in linux has some weirdness. For instance, this is a proper call to tar when we don't want to include the relative path to the target folder
+ # tar -cvzf -C tools/test-proxy/linux-arm64 blah.tar.gz tools/test-proxy/linux-arm64
+ # however when we use this, we actually get an error. To avoid this, we simply CD into the target directory before tar-ing it.
+ Push-Location "$outputPath"
+ # The sum contents within this folder will be: `appSettings.json`, `test-proxy.pdb`, `test-proxy` (the binary), and a certificate.
+ # This statement grabs the first extensionless file within the produced binary folder, which will always be the binary we need to set the executable bit on.
+ $binaryFile = (Get-ChildItem -Path . | Where-Object { $_.Name -eq $AssemblyName } | Select-Object -First 1).ToString().Replace("`\","/")
+
+ bash -c "chmod +x $binaryFile"
+ tar -cvzf "$($destinationPathSegment).tar.gz" .
+ Pop-Location
+}
+elseif("$($Rid)".Contains("osx")){
+ # need to codesign the binary with an entitlements file such that the signed and notarized binary will properly invoke on
+ # a mac system. However, the `codesign` command is only available on a MacOS agent. With that being the case, we simply special case
+ # this function here to ensure that the script does not fail outside of a MacOS agent.
+ if ($IsMacOS) {
+ $binaryFile = Get-ChildItem -Path $outputPath | Where-Object { $_.Name -eq $AssemblyName } | Select-Object -First 1
+ $binaryFileBash = $binaryFile.ToString().Replace("`\","/")
+
+ $entitlements = (Resolve-Path -Path (Join-Path $PSScriptRoot ".." ".." ".." "dotnet-executable-entitlements.plist")).ToString().Replace("`\", "/")
+
+ bash -c "codesign --deep -s - -f --options runtime --entitlements $($entitlements) $($binaryFileBash)"
+ bash -c "codesign -d --entitlements :- $($binaryFileBash)"
+ }
+
+ Compress-Archive -Path "$($outputPath)/*" -DestinationPath "$($destinationPathSegment).zip"
+}
+else {
+ Compress-Archive -Path "$($outputPath)/*" -DestinationPath "$($destinationPathSegment).zip"
+}
+
+# clean up the uncompressed artifact directory
+Remove-Item -Recurse -Force -Path $outputPath
+
+
+
diff --git a/eng/pipelines/templates/stages/archetype-sdk-tool-dotnet.yml b/eng/pipelines/templates/stages/archetype-sdk-tool-dotnet.yml
index 77eb3019ec3..c2082a4aefe 100644
--- a/eng/pipelines/templates/stages/archetype-sdk-tool-dotnet.yml
+++ b/eng/pipelines/templates/stages/archetype-sdk-tool-dotnet.yml
@@ -3,7 +3,7 @@ resources:
- repository: azure-sdk-build-tools
type: git
name: internal/azure-sdk-build-tools
- ref: refs/tags/azure-sdk-build-tools_20230112.1
+ ref: refs/tags/azure-sdk-build-tools_20230201.1
parameters:
- name: ToolDirectory
@@ -76,12 +76,6 @@ stages:
DOTNET_CLI_TELEMETRY_OPTOUT: 1
DOTNET_MULTILEVEL_LOOKUP: 0
- - template: /eng/pipelines/templates/steps/produce-net-standalone-packs.yml
- parameters:
- StagingDirectory: $(Build.ArtifactStagingDirectory)
- BuildMatrix: ${{ parameters.StandaloneExeMatrix }}
- TargetDirectory: '${{ coalesce(parameters.PackageDirectory, parameters.ToolDirectory) }}'
-
# This step creates "$(packagesToPublishDir)" directory if it doesn't exist.
# This step is necessary since migration to net6.0. This is because since net6.0,
# in case the "Build and Package" above would not output any packages to this directory,
@@ -102,11 +96,33 @@ stages:
artifact: packages
condition: succeededOrFailed()
- - ${{ if gt(length(parameters.StandaloneExeMatrix), 0) }}:
- - publish: $(Build.ArtifactStagingDirectory)/binaries
- displayName: Publish executables to binaries artifact
- artifact: binaries
- condition: succeededOrFailed()
+ - job: Produce_Executables
+
+ strategy:
+ matrix:
+ linux:
+ imageName: 'ubuntu-22.04'
+ poolName: 'azsdk-pool-mms-ubuntu-2204-general'
+ artifactName: 'linux_windows'
+ mac:
+ imageName: 'macos-11'
+ poolName: 'Azure Pipelines'
+ artifactName: 'mac'
+
+ pool:
+ name: $(poolName)
+ vmImage: $(imageName)
+
+ steps:
+ - template: /eng/pipelines/templates/steps/install-dotnet.yml
+ parameters:
+ DotNetCoreVersion: ${{ parameters.DotNetCoreVersion }}
+
+ - template: /eng/pipelines/templates/steps/produce-net-standalone-packs.yml
+ parameters:
+ StagingDirectory: $(Build.ArtifactStagingDirectory)
+ BuildMatrix: ${{ parameters.StandaloneExeMatrix }}
+ TargetDirectory: '${{ coalesce(parameters.PackageDirectory, parameters.ToolDirectory) }}'
- job: Test
diff --git a/eng/pipelines/templates/steps/apiview-ui-tests.yml b/eng/pipelines/templates/steps/apiview-ui-tests.yml
index bcb6bccc3fa..b5df11b7b66 100644
--- a/eng/pipelines/templates/steps/apiview-ui-tests.yml
+++ b/eng/pipelines/templates/steps/apiview-ui-tests.yml
@@ -15,17 +15,12 @@ parameters:
type: string
default: ''
-
steps:
- task: NodeTool@0
inputs:
versionSpec: '${{ parameters.NodeVersion }}'
displayName: 'Use NodeJS ${{ parameters.NodeVersion }}'
-- script: |
- npm install -g npm@6.14.6
- displayName: "Install npm 6.14.6"
-
- script: |
npm install
workingDirectory: '${{ parameters.WebClientProjectDirectory }}'
@@ -70,6 +65,18 @@ steps:
env:
AZCOPY_SPA_CLIENT_SECRET: $(apiviewstorageaccess-service-principal-key)
+- task: Powershell@2
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/scripts/copy-from-blobstorage.ps1
+ arguments: >
+ -SourceBlobPath '${{ parameters.TestingDataContainer }}'
+ -ApplicationId $(apiviewstorageaccess-application-id)
+ -DestinationDirectory $(Build.BinariesDirectory)
+ pwsh: true
+ displayName: 'Copy from Test Files From Blob'
+ env:
+ AZCOPY_SPA_CLIENT_SECRET: $(apiviewstorageaccess-service-principal-key)
+
- task: DotNetCoreCLI@2
displayName: 'Build & Test (UI)'
env:
diff --git a/eng/pipelines/templates/steps/produce-net-standalone-packs.yml b/eng/pipelines/templates/steps/produce-net-standalone-packs.yml
index cd6d21a49e9..ef13615a1cc 100644
--- a/eng/pipelines/templates/steps/produce-net-standalone-packs.yml
+++ b/eng/pipelines/templates/steps/produce-net-standalone-packs.yml
@@ -8,57 +8,67 @@ parameters:
type: object
default: []
-# A `BuildMatrix` is merely a list of possible targeted platforms. .NET 6 can build for any target from any other target.
+# A `BuildMatrix` is merely a list of possible targeted platforms. .NET 6+ can build for any target from any other target.
# - rid: win-x64
# framework: net6.0
+ # assembly: Azure.Sdk.Tools.TestProxy
# - rid: linux-x64
# framework: net6.0
+ # assembly: Azure.Sdk.Tools.TestProxy
# - rid: osx-x64
# framework: net6.0
+ # assembly: Azure.Sdk.Tools.TestProxy
steps:
- - pwsh: |
- New-Item -ItemType Directory -Path "$(Build.ArtifactStagingDirectory)/binaries" -Force
- Write-Host "Created directory ""$(Build.ArtifactStagingDirectory)/binaries"""
- displayName: Create .NET standalone packs destination directory
-
- ${{ each target in parameters.BuildMatrix }}:
- - pwsh: |
- $destinationPathSegment = "$(Build.ArtifactStagingDirectory)/binaries/$((Split-Path -Leaf "${{ parameters.TargetDirectory }}"))-standalone-${{ target.rid }}"
- $sourcePath = "${{ parameters.TargetDirectory }}/${{ target.rid }}"
-
- Write-Host "dotnet publish -f ${{ target.framework }} -c Release -r ${{ target.rid }} --self-contained -p:PublishSingleFile=true --output ${{ parameters.TargetDirectory }}/${{ target.rid }}"
- dotnet publish -f ${{ target.framework }} -c Release -r ${{ target.rid }} --self-contained -p:PublishSingleFile=true --output ${{ parameters.TargetDirectory }}/${{ target.rid }}
-
- if ($LASTEXITCODE -ne 0) {
- Write-Error "dotnet publish failed with exit code $LASTEXITCODE."
- exit $LASTEXITCODE
- }
-
- # produce a tar.gz only for linux
- if ("${{ target.rid }}".Contains("linux")){
- # tar on powershell in linux has some weirdness. For instance, this is a proper call to tar when we don't want to include the relative path to the target folder
- # tar -cvzf -C tools/test-proxy/linux-arm64 blah.tar.gz tools/test-proxy/linux-arm64
- # however when we use this, we actually get an error. To avoid this, we simply CD into the target directory before tar-ing it.
- Push-Location "$sourcePath"
- # The sum contents within this folder will be: `appSettings.json`, `test-proxy.pdb`, `test-proxy` (the binary), and a certificate.
- # This statement grabs the first extensionless file within the produced binary folder, which will always be the binary we need to set the executable bit on.
- $binaryFile = (Get-ChildItem -Path . | Where-Object { !([System.IO.Path]::HasExtension($_)) } | Select-Object -First 1).ToString().Replace("`\","/")
- bash -c "chmod +x $binaryFile"
- tar -cvzf "$($destinationPathSegment).tar.gz" .
- Pop-Location
- }
- else {
- Compress-Archive -Path "$($sourcePath)/*" -DestinationPath "$($destinationPathSegment).zip"
- }
+ # ensure that we only build the mac standalone executables on a mac agent, everything else on not mac
+ - ${{ if startsWith(target.rid, 'osx') }}:
+ - task: Powershell@2
+ inputs:
+ workingDirectory: "$(Build.SourcesDirectory)"
+ filePath: $(Build.SourcesDirectory)/eng/pipelines/templates/scripts/assemble-dotnet-standalone-exe.ps1
+ arguments: >
+ -Rid "${{ target.rid }}"
+ -Target "${{ parameters.TargetDirectory }}"
+ -ArtifactStagingDirectory "$(Build.ArtifactStagingDirectory)"
+ -Framework "${{ target.framework }}"
+ -AssemblyName "${{ target.assembly }}"
+ pwsh: true
+ displayName: 'Produce Executable for ${{ target.rid }}'
+ condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin'))
+ env:
+ DOTNET_SKIP_FIRST_TIME_EXPERIENCE: 1
+ DOTNET_CLI_TELEMETRY_OPTOUT: 1
+ DOTNET_MULTILEVEL_LOOKUP: 0
- # clean up the uncompressed artifact directory
- Remove-Item -Recurse -Force -Path $sourcePath
- displayName: 'Produce Executable for ${{ target.rid }}'
- workingDirectory: "${{ parameters.TargetDirectory }}"
- env:
- DOTNET_SKIP_FIRST_TIME_EXPERIENCE: 1
- DOTNET_CLI_TELEMETRY_OPTOUT: 1
- DOTNET_MULTILEVEL_LOOKUP: 0
+ - template: /eng/common/pipelines/templates/steps/publish-artifact.yml
+ parameters:
+ ArtifactName: "standalone-${{ target.rid }}"
+ ArtifactPath: "$(Build.ArtifactStagingDirectory)/${{ target.rid }}"
+ customCondition: eq(variables['Agent.OS'], 'Darwin')
+ - ${{ if not(startswith(target.rid, 'osx')) }}:
+ - task: Powershell@2
+ inputs:
+ workingDirectory: "$(Build.SourcesDirectory)"
+ filePath: $(Build.SourcesDirectory)/eng/pipelines/templates/scripts/assemble-dotnet-standalone-exe.ps1
+ arguments: >
+ -Rid "${{ target.rid }}"
+ -Target "${{ parameters.TargetDirectory }}"
+ -ArtifactStagingDirectory "$(Build.ArtifactStagingDirectory)"
+ -Framework "${{ target.framework }}"
+ -AssemblyName "${{ target.assembly }}"
+ pwsh: true
+ displayName: 'Produce Executable for ${{ target.rid }}'
+ condition: and(succeeded(), not(eq(variables['Agent.OS'], 'Darwin')))
+ env:
+ DOTNET_SKIP_FIRST_TIME_EXPERIENCE: 1
+ DOTNET_CLI_TELEMETRY_OPTOUT: 1
+ DOTNET_MULTILEVEL_LOOKUP: 0
+
+ - template: /eng/common/pipelines/templates/steps/publish-artifact.yml
+ parameters:
+ ArtifactName: "standalone-${{ target.rid }}"
+ ArtifactPath: "$(Build.ArtifactStagingDirectory)/${{ target.rid }}"
+ CustomCondition: not(eq(variables['Agent.OS'], 'Darwin'))
diff --git a/eng/pipelines/templates/variables/globals.yml b/eng/pipelines/templates/variables/globals.yml
index ee14224a298..91adfb03423 100644
--- a/eng/pipelines/templates/variables/globals.yml
+++ b/eng/pipelines/templates/variables/globals.yml
@@ -1,5 +1,5 @@
variables:
OfficialBuildId: $(Build.BuildNumber)
skipComponentGovernanceDetection: true
- NotificationsCreatorVersion: '1.0.0-dev.20230119.1'
+ NotificationsCreatorVersion: '1.0.0-dev.20230127.4'
PipelineOwnersExtractorVersion: '1.0.0-dev.20230105.1'
diff --git a/eng/scripts/Github-Project-Helpers.ps1 b/eng/scripts/Github-Project-Helpers.ps1
index 40f9c89e89b..6062df5d004 100644
--- a/eng/scripts/Github-Project-Helpers.ps1
+++ b/eng/scripts/Github-Project-Helpers.ps1
@@ -5,23 +5,27 @@ function Get-GithubProjectId([string] $project)
# https://github.com/users//projects/
# or just a number in which case default to Azure as the org
$projectId = ""
- if ($project -match "((orgs/(?.*))|(users/(?.*))/projects/)?(?\d+)$")
+ if ($project -match "(((orgs/(?.*))|(users/(?.*)))/projects/)?(?\d+)$")
{
$projectNumber = $matches["number"]
if ($matches["user"]) {
$name = $matches["user"]
- $projectQuery = 'query($name: String!, $number: Int!) { user(login: $name) { projectNext(number: $number) { id } } }'
- $selectQuery = ".data.user.projectNext.id"
+ $projectQuery = 'query($name: String!, $number: Int!) { user(login: $name) { projectV2(number: $number) { id } } }'
+ $selectQuery = ".data.user.projectV2.id"
}
else {
$name = $matches["org"]
$name ??= "Azure"
- $projectQuery = 'query($name: String!, $number: Int!) { organization(login: $name) { projectNext(number: $number) { id } } }'
- $selectQuery = ".data.organization.projectNext.id"
+ $projectQuery = 'query($name: String!, $number: Int!) { organization(login: $name) { projectV2(number: $number) { id } } }'
+ $selectQuery = ".data.organization.projectV2.id"
}
$projectId = gh api graphql -f query=$projectQuery -F name=$name -F number=$projectNumber --jq $selectQuery
+
+ if ($LASTEXITCODE) {
+ Write-Error "$projectId`nLASTEXITCODE = $LASTEXITCODE"
+ }
}
return $projectId
}
@@ -30,13 +34,16 @@ function Add-GithubIssueToProject([string]$projectId, [string]$issueId)
{
$projectItemId = gh api graphql -F projectId=$projectId -F issueId=$issueId -f query='
mutation($projectId: ID!, $issueId: ID!) {
- addProjectNextItem(input: {projectId: $projectId, contentId: $issueId}) {
- projectNextItem {
+ addProjectV2ItemById(input: {projectId: $projectId, contentId: $issueId}) {
+ item {
id
}
}
- }' --jq ".data.addProjectNextItem.projectNextItem.id"
+ }' --jq ".data.addProjectV2ItemById.item.id"
+ if ($LASTEXITCODE) {
+ Write-Error "$projectItemId`nLASTEXITCODE = $LASTEXITCODE"
+ }
return $projectItemId
}
@@ -44,11 +51,14 @@ function Remove-GithubIssueFromProject([string]$projectId, [string]$projectItemI
{
$projectDeletedItemId = gh api graphql -F projectId=$projectId -F itemId=$projectItemId -f query='
mutation($projectId: ID!, $itemId: ID!) {
- deleteProjectNextItem(input: {projectId: $projectId, itemId: $itemId} ) {
+ deleteProjectV2Item(input: {projectId: $projectId, itemId: $itemId} ) {
deletedItemId
}
- }' --jq ".data.deleteProjectNextItem.deletedItemId"
+ }' --jq ".data.deleteProjectV2Item.deletedItemId"
+ if ($LASTEXITCODE) {
+ Write-Error "$projectDeletedItemId`nLASTEXITCODE = $LASTEXITCODE"
+ }
return $projectDeletedItemId
}
diff --git a/src/dotnet/APIView/APIViewWeb/Client/tsconfig.json b/src/dotnet/APIView/APIViewWeb/Client/tsconfig.json
index 67efce34959..c1a76b12743 100644
--- a/src/dotnet/APIView/APIViewWeb/Client/tsconfig.json
+++ b/src/dotnet/APIView/APIViewWeb/Client/tsconfig.json
@@ -15,7 +15,8 @@
"types": [
"webpack-env",
"jquery",
- "jqueryui"
+ "jqueryui",
+ "node"
],
"paths": {
"@/*": [
diff --git a/src/dotnet/APIView/APIViewWeb/HostedServices/ReviewBackgroundHostedService.cs b/src/dotnet/APIView/APIViewWeb/HostedServices/ReviewBackgroundHostedService.cs
index 46d36e73d2a..d1ef881dcc5 100644
--- a/src/dotnet/APIView/APIViewWeb/HostedServices/ReviewBackgroundHostedService.cs
+++ b/src/dotnet/APIView/APIViewWeb/HostedServices/ReviewBackgroundHostedService.cs
@@ -1,6 +1,7 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
+// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
+using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using APIViewWeb.Managers;
@@ -16,6 +17,8 @@ public class ReviewBackgroundHostedService : BackgroundService
private readonly bool _isDisabled;
private readonly IReviewManager _reviewManager;
private readonly int _autoArchiveInactiveGracePeriodMonths; // This is inactive duration in months
+ private readonly HashSet _upgradeDisabledLangs = new HashSet();
+ private readonly int _backgroundBatchProcessCount;
static TelemetryClient _telemetryClient = new(TelemetryConfiguration.CreateDefault());
@@ -33,6 +36,18 @@ public ReviewBackgroundHostedService(IReviewManager reviewManager, IConfiguratio
{
_autoArchiveInactiveGracePeriodMonths = 4;
}
+ var backgroundTaskDisabledLangs = configuration["ReviewUpdateDisabledLanguages"];
+ if(!string.IsNullOrEmpty(backgroundTaskDisabledLangs))
+ {
+ _upgradeDisabledLangs.UnionWith(backgroundTaskDisabledLangs.Split(','));
+ }
+
+ // Number of review revisions to be passed to pipeline when updating review with a new parser version
+ var batchCount = configuration["ReviewUpdateBatchCount"];
+ if (String.IsNullOrEmpty(batchCount) || !int.TryParse(batchCount, out _backgroundBatchProcessCount))
+ {
+ _backgroundBatchProcessCount = 20;
+ }
}
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
@@ -41,7 +56,7 @@ protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
try
{
- await _reviewManager.UpdateReviewBackground();
+ await _reviewManager.UpdateReviewBackground(_upgradeDisabledLangs, _backgroundBatchProcessCount);
await ArchiveInactiveReviews(stoppingToken, _autoArchiveInactiveGracePeriodMonths);
}
catch (Exception ex)
diff --git a/src/dotnet/APIView/APIViewWeb/Managers/IReviewManager.cs b/src/dotnet/APIView/APIViewWeb/Managers/IReviewManager.cs
index 6817f42e50a..3d18e57a919 100644
--- a/src/dotnet/APIView/APIViewWeb/Managers/IReviewManager.cs
+++ b/src/dotnet/APIView/APIViewWeb/Managers/IReviewManager.cs
@@ -30,7 +30,7 @@ public interface IReviewManager
public Task ApprovePackageNameAsync(ClaimsPrincipal user, string id);
public Task IsReviewSame(ReviewRevisionModel revision, RenderedCodeFile renderedCodeFile);
public Task CreateMasterReviewAsync(ClaimsPrincipal user, string originalName, string label, Stream fileStream, bool compareAllRevisions);
- public Task UpdateReviewBackground();
+ public Task UpdateReviewBackground(HashSet updateDisabledLanguages, int backgroundBatchProcessCount);
public Task GetCodeFile(string repoName, string buildId, string artifactName, string packageName, string originalFileName, string codeFileName,
MemoryStream originalFileStream, string baselineCodeFileName = "", MemoryStream baselineStream = null, string project = "public");
public Task CreateApiReview(ClaimsPrincipal user, string buildId, string artifactName, string originalFileName, string label,
diff --git a/src/dotnet/APIView/APIViewWeb/Managers/ReviewManager.cs b/src/dotnet/APIView/APIViewWeb/Managers/ReviewManager.cs
index 074a9f0c7eb..b68de0a0ef4 100644
--- a/src/dotnet/APIView/APIViewWeb/Managers/ReviewManager.cs
+++ b/src/dotnet/APIView/APIViewWeb/Managers/ReviewManager.cs
@@ -8,6 +8,7 @@
using System.Linq;
using System.Security.Claims;
using System.Text.Json;
+using System.Threading;
using System.Threading.Tasks;
using ApiView;
using APIView.DIff;
@@ -338,26 +339,97 @@ public async Task CreateMasterReviewAsync(ClaimsPrincipal u
return await CreateMasterReviewAsync(user, codeFile, originalName, label, memoryStream, compareAllRevisions);
}
- public async Task UpdateReviewBackground()
+ public async Task UpdateReviewBackground(HashSet updateDisabledLanguages, int backgroundBatchProcessCount)
{
- var reviews = await _reviewsRepository.GetReviewsAsync(false, "All", fetchAllPages: true);
- foreach (var review in reviews.Where(r => IsUpdateAvailable(r)))
+ foreach(var language in LanguageService.SupportedLanguages)
{
- var requestTelemetry = new RequestTelemetry { Name = "Updating Review " + review.ReviewId };
- var operation = _telemetryClient.StartOperation(requestTelemetry);
- try
+ if (updateDisabledLanguages.Contains(language))
{
- await Task.Delay(500);
- await UpdateReviewAsync(review);
+ _telemetryClient.TrackTrace("Background task to update API review at startup is disabled for langauge " + language);
+ continue;
}
- catch (Exception e)
+
+ var languageService = GetLanguageService(language);
+ if (languageService == null)
+ return;
+
+ // If review is updated using devops pipeline then batch process update review requests
+ if (languageService.IsReviewGenByPipeline)
{
- _telemetryClient.TrackException(e);
+ await UpdateReviewsUsingPipeline(language, languageService, backgroundBatchProcessCount);
}
- finally
+ else
{
- _telemetryClient.StopOperation(operation);
+ var reviews = await _reviewsRepository.GetReviewsAsync(false, language, fetchAllPages: true);
+ foreach (var review in reviews.Where(r => IsUpdateAvailable(r)))
+ {
+ var requestTelemetry = new RequestTelemetry { Name = "Updating Review " + review.ReviewId };
+ var operation = _telemetryClient.StartOperation(requestTelemetry);
+ try
+ {
+ await Task.Delay(500);
+ await UpdateReviewAsync(review, languageService);
+ }
+ catch (Exception e)
+ {
+ _telemetryClient.TrackException(e);
+ }
+ finally
+ {
+ _telemetryClient.StopOperation(operation);
+ }
+ }
+ }
+ }
+ }
+
+    // Languages that fully support sandboxing update reviews using an Azure DevOps pipeline.
+    // We should batch all eligible reviews to avoid a pipeline run storm.
+ private async Task UpdateReviewsUsingPipeline(string language, LanguageService languageService, int backgroundBatchProcessCount)
+ {
+ var reviews = await _reviewsRepository.GetReviewsAsync(false, language, fetchAllPages: true);
+ var paramList = new List();
+
+ foreach(var review in reviews)
+ {
+ foreach (var revision in review.Revisions.Reverse())
+ {
+ foreach (var file in revision.Files)
+ {
+                    // Don't include the current revision if the file doesn't need to be updated,
+                    // e.g. a JSON token file was uploaded for the language, or this specific revision was already upgraded.
+ if (!file.HasOriginal || file.FileName == null || !languageService.IsSupportedFile(file.FileName) || !languageService.CanUpdate(file.VersionString))
+ {
+ continue;
+ }
+
+ _telemetryClient.TrackTrace($"Updating review: {review.ReviewId}, revision: {revision.RevisionId}");
+ paramList.Add(new ReviewGenPipelineParamModel()
+ {
+ FileID = file.ReviewFileId,
+ ReviewID = review.ReviewId,
+ RevisionID = revision.RevisionId,
+ FileName = Path.GetFileName(file.FileName)
+ });
+ }
}
+
+            // Batch size is configurable via the ReviewUpdateBatchCount setting (default 20)
+            if (paramList.Count >= backgroundBatchProcessCount)
+ {
+ _telemetryClient.TrackTrace($"Running pipeline to update reviews for {language} with batch size {paramList.Count}");
+ await RunReviewGenPipeline(paramList, languageService.Name);
+                // Delay of 10 minutes before starting the next batch.
+                // We should prefer increasing the number of revisions per batch over increasing the number of runs.
+ await Task.Delay(600000);
+ paramList.Clear();
+ }
+ }
+
+ if (paramList.Count > 0)
+ {
+ _telemetryClient.TrackTrace($"Running pipeline to update reviews for {language} with batch size {paramList.Count}");
+ await RunReviewGenPipeline(paramList, languageService.Name);
}
}
@@ -627,74 +699,36 @@ public async Task IsApprovedForFirstRelease(string language, string packag
return reviews.Any();
}
- private async Task UpdateReviewOffline(ReviewModel review)
+
+ private async Task UpdateReviewAsync(ReviewModel review, LanguageService languageService)
{
- var paramList = new List();
- var languageService = GetLanguageService(review.Language);
foreach (var revision in review.Revisions.Reverse())
{
foreach (var file in revision.Files)
{
- //Don't include current revision if file is not required to be updated.
- // E.g. json token file is uploaded for a language, specific revision was already upgraded.
- if (!file.HasOriginal || file.FileName == null || !languageService.IsSupportedFile(file.FileName) || !languageService.CanUpdate(file.VersionString))
+ if (!file.HasOriginal || !languageService.CanUpdate(file.VersionString))
{
continue;
}
- paramList.Add(new ReviewGenPipelineParamModel()
- {
- FileID = file.ReviewFileId,
- ReviewID = review.ReviewId,
- RevisionID = revision.RevisionId,
- FileName = Path.GetFileName(file.FileName)
- });
- }
- }
- await RunReviewGenPipeline(paramList, languageService.Name);
- }
- private async Task UpdateReviewAsync(ReviewModel review)
- {
- var languageService = GetLanguageService(review.Language);
- if (languageService == null)
- return;
-
- if (languageService.IsReviewGenByPipeline)
- {
- // Wait 30 seconds before running next review gen using pipeline to reduce queueing pipeline jobs
- await Task.Delay(30000);
- await UpdateReviewOffline(review);
- }
- else
- {
- foreach (var revision in review.Revisions.Reverse())
- {
- foreach (var file in revision.Files)
+ try
{
- if (!file.HasOriginal || !languageService.CanUpdate(file.VersionString))
- {
- continue;
- }
-
- try
- {
- var fileOriginal = await _originalsRepository.GetOriginalAsync(file.ReviewFileId);
- // file.Name property has been repurposed to store package name and version string
- // This is causing issue when updating review using latest parser since it expects Name field as file name
- // We have added a new property FileName which is only set for new reviews
- // All older reviews needs to be handled by checking review name field
- var fileName = file.FileName ?? (Path.HasExtension(review.Name) ? review.Name : file.Name);
- var codeFile = await languageService.GetCodeFileAsync(fileName, fileOriginal, review.RunAnalysis);
- await _codeFileRepository.UpsertCodeFileAsync(revision.RevisionId, file.ReviewFileId, codeFile);
- // update only version string
- file.VersionString = codeFile.VersionString;
- await _reviewsRepository.UpsertReviewAsync(review);
- }
- catch (Exception ex)
- {
- _telemetryClient.TrackTrace("Failed to update review " + review.ReviewId);
- _telemetryClient.TrackException(ex);
- }
+ var fileOriginal = await _originalsRepository.GetOriginalAsync(file.ReviewFileId);
+                    // The file.Name property has been repurposed to store the package name and version string.
+                    // This causes an issue when updating a review using the latest parser, since the parser expects the Name field to be a file name.
+                    // We have added a new property, FileName, which is only set for new reviews.
+                    // All older reviews need to be handled by checking the review's Name field.
+ var fileName = file.FileName ?? (Path.HasExtension(review.Name) ? review.Name : file.Name);
+ var codeFile = await languageService.GetCodeFileAsync(fileName, fileOriginal, review.RunAnalysis);
+ await _codeFileRepository.UpsertCodeFileAsync(revision.RevisionId, file.ReviewFileId, codeFile);
+ // update only version string
+ file.VersionString = codeFile.VersionString;
+ await _reviewsRepository.UpsertReviewAsync(review);
+ }
+ catch (Exception ex)
+ {
+ _telemetryClient.TrackTrace("Failed to update review " + review.ReviewId);
+ _telemetryClient.TrackException(ex);
}
}
}
@@ -965,9 +999,11 @@ private async Task GenerateReviewOffline(ReviewModel review, string revisionId,
throw new Exception($"Failed to run pipeline for review: {param.ReviewID}, file: {param.FileName}");
}
- var paramList = new List();
- paramList.Add(param);
-
+ var paramList = new List
+ {
+ param
+ };
+
await RunReviewGenPipeline(paramList, languageService.Name);
}
diff --git a/src/dotnet/APIView/APIViewWeb/Pages/Assemblies/Review.cshtml b/src/dotnet/APIView/APIViewWeb/Pages/Assemblies/Review.cshtml
index cd31ad5a4cb..9c1777e32a3 100644
--- a/src/dotnet/APIView/APIViewWeb/Pages/Assemblies/Review.cshtml
+++ b/src/dotnet/APIView/APIViewWeb/Pages/Assemblies/Review.cshtml
@@ -1,442 +1,474 @@
-@page "{id}/{revisionId?}"
-@model APIViewWeb.Pages.Assemblies.ReviewPageModel
-@using APIViewWeb.Helpers
-@using APIViewWeb.Models
-@{
- Layout = "ReviewLayout";
- ViewData["Title"] = Model.Review.DisplayName;
- var userPreference = PageModelHelpers.GetUserPreference(Model._preferenceCache, User);
- TempData["UserPreference"] = userPreference;
- ViewBag.HasSections = (Model.CodeFile.LeafSections?.Count > 0) ? true : false;
-}
-
-