diff --git a/.coveragerc b/.coveragerc index 21aa73c31b30..4014e9186c74 100644 --- a/.coveragerc +++ b/.coveragerc @@ -6,10 +6,16 @@ omit = */test* env* +[paths] +source = + sdk/ + **/sdk + [report] exclude_lines = pragma: no cover if raw: if custom_headers: if headers: - if response.status_code not in \ No newline at end of file + if response.status_code not in + if TYPE_CHECKING: \ No newline at end of file diff --git a/.gitignore b/.gitignore index d357979f797d..0fe206db7138 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,9 @@ __pycache__/ # Virtual environment env*/ +# Codespace virtual environment +pythonenv*/ + # PTVS analysis .ptvs/ @@ -30,6 +33,7 @@ build/ # Test results TestResults/ +ENV_DIR/ # tox generated artifacts test-junit-*.xml @@ -84,7 +88,7 @@ src/build # [begoldsm] ignore virtual env if it exists. adlEnv/ - +venv/ code_reports # Azure Storage test credentials @@ -96,4 +100,7 @@ sdk/storage/azure-storage-file-datalake/tests/settings_real.py sdk/cosmos/azure-cosmos/test/test_config.py # temp path to to run regression test -.tmp_code_path/ \ No newline at end of file +.tmp_code_path/ + +# env vars +.env diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index d457accf63c7..4b12f1bdcb8f 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -68,7 +68,7 @@ members of the project's leadership. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, -available at [http://contributor-covenant.org/version/1/4][version] +available at [https://contributor-covenant.org/version/1/4][version] -[homepage]: http://contributor-covenant.org -[version]: http://contributor-covenant.org/version/1/4/ +[homepage]: https://contributor-covenant.org +[version]: https://contributor-covenant.org/version/1/4/ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 08468ee85c91..3bcf82e448d7 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,7 +1,7 @@ # Contributing to Azure Python SDK If you would like to become an active contributor to this project please -follow the instructions provided in [Microsoft Azure Projects Contribution Guidelines](http://azure.github.io/guidelines/). +follow the instructions provided in [Microsoft Azure Projects Contribution Guidelines](https://opensource.microsoft.com/collaborate/). ## Building and Testing @@ -111,9 +111,17 @@ Mypy install and run. **Example: Invoke tox, breaking into the debugger on failure** `tox -e whl -c ../../../eng/tox/tox.ini -- --pdb` -### Daily Dev Build -Daily dev build version of Azure sdk packages for python are available and are uploaded to Azure devops feed nightly. We have also created a tox environment to test a package against dev built version of dependent packages. Below is the link to Azure devops feed. -`https://dev.azure.com/azure-sdk/public/_packaging?_a=feed&feed=azure-sdk-for-python` +### Performance Testing + +SDK performance testing is supported via the custom `perfstress` framework. For full details on this framework, and how to write and run tests for an SDK - see the [perfstress tests documentation](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/perfstress_tests.md). + +### More Reading + +We maintain an [additional document](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/eng_sys_checks.md) that has a ton of detail as to what is actually _happening_ in these executions. + +### Dev Feed +Daily dev build version of Azure sdk packages for python are available and are uploaded to Azure devops feed daily. 
We have also created a tox environment to test a package against dev built version of dependent packages. Below is the link to Azure devops feed. +[`https://dev.azure.com/azure-sdk/public/_packaging?_a=feed&feed=azure-sdk-for-python`](https://dev.azure.com/azure-sdk/public/_packaging?_a=feed&feed=azure-sdk-for-python) ##### To install latest dev build version of a package ``` @@ -139,4 +147,4 @@ This tox test( devtest) will fail if installed dependent packages are not dev bu ## Code of Conduct This project's code of conduct can be found in the [CODE_OF_CONDUCT.md file](https://github.com/Azure/azure-sdk-for-python/blob/master/CODE_OF_CONDUCT.md) -(v1.4.0 of the http://contributor-covenant.org/ CoC). +(v1.4.0 of the https://contributor-covenant.org/ CoC). diff --git a/NOTICE.txt b/NOTICE.txt index 29fd1c19d257..e7ab85b06abd 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -1,7 +1,17 @@ -This file list any third-party libraries or other resources that may be -distributed under licenses different than the Azure SDK for Python software. +NOTICES AND INFORMATION +Do Not Translate or Localize -In the event that we accidentally failed to list a required notice, please -bring it to our attention by opening an issue. +This software incorporates material from third parties. Microsoft makes certain +open source code available at https://3rdpartysource.microsoft.com, or you may +send a check or money order for US $5.00, including the product name, the open +source component name, and version number, to: -The attached notices are provided for information only. \ No newline at end of file +Source Code Compliance Team +Microsoft Corporation +One Microsoft Way +Redmond, WA 98052 +USA + +Notwithstanding any other terms, you may reverse engineer this software to the +extent required to debug changes to any libraries licensed under the GNU Lesser +General Public License. diff --git a/README.md b/README.md index 9818708cc708..6658d4673949 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![Packages](https://img.shields.io/badge/packages-latest-blue.svg)](https://azure.github.io/azure-sdk/releases/latest/python.html) [![Dependencies](https://img.shields.io/badge/dependency-report-blue.svg)](https://azuresdkartifacts.blob.core.windows.net/azure-sdk-for-python/dependencies/dependencies.html) [![DepGraph](https://img.shields.io/badge/dependency-graph-blue.svg)](https://azuresdkartifacts.blob.core.windows.net/azure-sdk-for-python/dependencies/InterdependencyGraph.html) [![Python](https://img.shields.io/pypi/pyversions/azure-core.svg?maxAge=2592000)](https://pypi.python.org/pypi/azure/) [![Build Status](https://dev.azure.com/azure-sdk/public/_apis/build/status/python/python%20-%20core%20-%20ci?branchName=master)](https://dev.azure.com/azure-sdk/public/_build/latest?definitionId=458&branchName=master) -This repository is for active development of the Azure SDK for Python. For consumers of the SDK we recommend visiting our [public developer docs](https://docs.microsoft.com/en-us/python/azure/) or our versioned [developer docs](https://azure.github.io/azure-sdk-for-python). +This repository is for active development of the Azure SDK for Python. For consumers of the SDK we recommend visiting our [public developer docs](https://docs.microsoft.com/python/azure/) or our versioned [developer docs](https://azure.github.io/azure-sdk-for-python). ## Getting started @@ -17,13 +17,14 @@ The client libraries are supported on Python 2.7 and 3.5.3 or later. 
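As a minimal sketch of the two install paths mentioned above (a stable release from PyPI, or a nightly dev build from the Azure DevOps dev feed linked in the Dev Feed section), using `azure-storage-blob` purely as an illustrative package name:

```Shell
# Stable release from PyPI
pip install azure-storage-blob

# Latest dev build from the azure-sdk-for-python dev feed
pip install azure-storage-blob --pre --index-url https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-python/pypi/simple
```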
## Packages available Each service might have a number of libraries available from each of the following categories: -* [Client - New Releases](#Client-New-Releases) -* [Client - Previous Versions](#Client-Previous-Versions) -* [Management](#Management) +* [Client - New Releases](#client-new-releases) +* [Client - Previous Versions](#client-previous-versions) +* [Management - New Releases](#management-new-releases) +* [Management - Previous Versions](#management-previous-versions) ### Client: New Releases -New wave of packages that we are announcing as **GA** and several that are currently releasing in **preview**. These libraries allow you to use and consume existing resources and interact with them, for example: upload a blob. These libraries share a number of core functionalities such as: retries, logging, transport protocols, authentication protocols, etc. that can be found in the [azure-core](./sdk/core/azure-core) library. You can learn more about these libraries by reading guidelines that they follow [here](https://azure.github.io/azure-sdk/python_introduction.html). +New wave of packages that we are announcing as **GA** and several that are currently releasing in **preview**. These libraries allow you to use and consume existing resources and interact with them, for example: upload a blob. These libraries share a number of core functionalities such as: retries, logging, transport protocols, authentication protocols, etc. that can be found in the [azure-core](https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/core/azure-core) library. You can learn more about these libraries by reading guidelines that they follow [here](https://azure.github.io/azure-sdk/python/guidelines/index.html). You can find the [most up to date list of all of the new packages on our page](https://azure.github.io/azure-sdk/releases/latest/index.html#python-packages) @@ -31,11 +32,19 @@ You can find the [most up to date list of all of the new packages on our page](h ### Client: Previous Versions -Last stable versions of packages that have been provided for usage with Azure and are production-ready. These libraries provide you with similar functionalities to the Preview ones as they allow you to use and consume existing resources and interact with them, for example: upload a blob. They might not implement the [guidelines](https://azure.github.io/azure-sdk/python_introduction.html) or have the same feature set as the Novemeber releases. They do however offer wider coverage of services. +Last stable versions of packages that have been provided for usage with Azure and are production-ready. These libraries provide you with similar functionalities to the Preview ones as they allow you to use and consume existing resources and interact with them, for example: upload a blob. They might not implement the [guidelines](https://azure.github.io/azure-sdk/python/guidelines/index.html) or have the same feature set as the November releases. They do however offer wider coverage of services. -### Management +### Management: New Releases +A new set of management libraries that follow the [Azure SDK Design Guidelines for Python](https://azure.github.io/azure-sdk/python/guidelines/) are now available. These new libraries provide a number of core capabilities that are shared amongst all Azure SDKs, including the intuitive Azure Identity library, an HTTP Pipeline with custom policies, error-handling, distributed tracing, and much more.
+Documentation and code samples for these new libraries can be found [here](https://aka.ms/azsdk/python/mgmt). In addition, a migration guide that shows how to transition from older versions of libraries is located [here](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/sphinx/mgmt_quickstart.rst#migration-guide). -Libraries which enable you to provision specific resources. They are responsible for directly mirroring and consuming Azure service's REST endpoints. The management libraries use the `azure-mgmt-` convention for their package names. +You can find the [most up to date list of all of the new packages on our page](https://azure.github.io/azure-sdk/releases/latest/mgmt/python.html) + +> NOTE: If you need to ensure your code is ready for production use one of the stable, non-preview libraries. Also, if you are experiencing authentication issues with the management libraries after upgrading certain packages, it's possible that you upgraded to the new versions of SDK without changing the authentication code, please refer to the migration guide mentioned above for proper instructions. + +### Management: Previous Versions +For a complete list of management libraries which enable you to provision and manage Azure resources, please [check here](https://azure.github.io/azure-sdk/releases/latest/all/python.html). They might not have the same feature set as the new releases but they do offer wider coverage of services. +Management libraries can be identified by namespaces that start with `azure-mgmt-`, e.g. `azure-mgmt-compute` ## Need help? @@ -53,7 +62,7 @@ Security issues and bugs should be reported privately, via email, to the Microso ## Contributing -For details on contributing to this repository, see the [contributing guide](CONTRIBUTING.md). +For details on contributing to this repository, see the [contributing guide](https://github.com/Azure/azure-sdk-for-python/blob/master/CONTRIBUTING.md). This project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit https://cla.microsoft.com. diff --git a/SECURITY.md b/SECURITY.md index 79589f291845..dec3d3b7013b 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,8 +1,41 @@ -# Security Policy + -## Reporting a Vulnerability +## Security -Security issues and bugs should be reported privately, via email, to the Microsoft Security Response Center (MSRC) . You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Further information, including the MSRC PGP key, can be found in the [Security TechCenter](https://www.microsoft.com/msrc/faqs-report-an-issue). +Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/). -Please do not open issues for anything you think might have a security implication. 
+If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://docs.microsoft.com/previous-versions/tn-archive/cc751383(v=technet.10)), please report it to us as described below. +## Reporting Security Issues + +**Please do not report security vulnerabilities through public GitHub issues.** + +Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://msrc.microsoft.com/create-report). + +If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/msrc/pgp-key-msrc). + +You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc). + +Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: + + * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) + * Full paths of source file(s) related to the manifestation of the issue + * The location of the affected source code (tag/branch/commit or direct URL) + * Any special configuration required to reproduce the issue + * Step-by-step instructions to reproduce the issue + * Proof-of-concept or exploit code (if possible) + * Impact of the issue, including how an attacker might exploit the issue + +This information will help us triage your report more quickly. + +If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://microsoft.com/msrc/bounty) page for more details about our active programs. + +## Preferred Languages + +We prefer all communications to be in English. + +## Policy + +Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/msrc/cvd). 
+ + diff --git a/build_package.py b/build_package.py index cb0064c33d7a..5c998ebcc182 100644 --- a/build_package.py +++ b/build_package.py @@ -9,8 +9,9 @@ import argparse import os import glob -from subprocess import check_call +import sys +from subprocess import check_call DEFAULT_DEST_FOLDER = "./dist" @@ -19,8 +20,8 @@ def create_package(name, dest_folder=DEFAULT_DEST_FOLDER): absdirs = [os.path.dirname(package) for package in (glob.glob('{}/setup.py'.format(name)) + glob.glob('sdk/*/{}/setup.py'.format(name)))] absdirpath = os.path.abspath(absdirs[0]) - check_call(['python', 'setup.py', 'bdist_wheel', '-d', dest_folder], cwd=absdirpath) - check_call(['python', 'setup.py', "sdist", "--format", "zip", '-d', dest_folder], cwd=absdirpath) + check_call([sys.executable, 'setup.py', 'bdist_wheel', '-d', dest_folder], cwd=absdirpath) + check_call([sys.executable, 'setup.py', "sdist", "--format", "zip", '-d', dest_folder], cwd=absdirpath) if __name__ == '__main__': diff --git a/ci_template.yml b/ci_template.yml index cbb383ff7eb7..99cd35337722 100644 --- a/ci_template.yml +++ b/ci_template.yml @@ -5,6 +5,7 @@ trigger: branches: include: - master + - main - hotfix/* - release/* - restapi* @@ -16,6 +17,7 @@ pr: branches: include: - master + - main - feature/* - hotfix/* - release/* diff --git a/common/smoketest/README.md b/common/smoketest/README.md index ae3dd469ed2e..d0adcac4171e 100644 --- a/common/smoketest/README.md +++ b/common/smoketest/README.md @@ -74,7 +74,7 @@ pip install -r requiriments.txt pip install -r requiriments_async.txt ``` -If a python version below 3.5 is being used, it is still possible to run the samples. When it gets to the async tests a message `'Async not suported'` will be displayed. +If a python version below 3.5 is being used, it is still possible to run the samples. When it gets to the async tests a message `'Async not supported'` will be displayed. ## Key concepts diff --git a/common/smoketest/key_vault_base.py b/common/smoketest/key_vault_base.py index 38fe851d7922..9ed79f2e1fb2 100644 --- a/common/smoketest/key_vault_base.py +++ b/common/smoketest/key_vault_base.py @@ -1,17 +1,22 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ import os -from azure.identity import DefaultAzureCredential, KnownAuthorities +from azure.identity import AzureAuthorityHosts, DefaultAzureCredential + class KeyVaultBase: credential_type = DefaultAzureCredential host_alias_map = { - 'AzureChinaCloud': KnownAuthorities.AZURE_CHINA, - 'AzureGermanCloud': KnownAuthorities.AZURE_GERMANY, - 'AzureUSGovernment': KnownAuthorities.AZURE_GOVERNMENT, - 'AzureCloud': KnownAuthorities.AZURE_PUBLIC_CLOUD, + "AzureChinaCloud": (AzureAuthorityHosts.AZURE_CHINA, "2016-10-01"), + "AzureGermanCloud": (AzureAuthorityHosts.AZURE_GERMANY, "2016-10-01"), + "AzureUSGovernment": (AzureAuthorityHosts.AZURE_GOVERNMENT, "2016-10-01"), + "AzureCloud": (AzureAuthorityHosts.AZURE_PUBLIC_CLOUD, "7.1"), } - # Instantiate a default credential based on the credential_type - def get_default_credential(self, authority_host_alias=None): - alias = authority_host_alias or os.environ.get("AZURE_CLOUD") - authority_host = self.host_alias_map.get(alias, KnownAuthorities.AZURE_PUBLIC_CLOUD) - return self.credential_type(authority=authority_host) + def get_client_args(self, authority_host_alias=None): + alias = authority_host_alias or os.environ.get("AZURE_CLOUD", "AzureCloud") + authority_host, api_version = self.host_alias_map[alias] + credential = self.credential_type(authority=authority_host) + return {"api_version": api_version, "credential": credential, "vault_url": os.environ["AZURE_PROJECT_URL"]} diff --git a/common/smoketest/key_vault_base_async.py b/common/smoketest/key_vault_base_async.py index c6f25493c2ca..c25bc9051434 100644 --- a/common/smoketest/key_vault_base_async.py +++ b/common/smoketest/key_vault_base_async.py @@ -1,5 +1,10 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ from key_vault_base import KeyVaultBase from azure.identity.aio import DefaultAzureCredential + class KeyVaultBaseAsync(KeyVaultBase): credential_type = DefaultAzureCredential \ No newline at end of file diff --git a/common/smoketest/key_vault_certificates.py b/common/smoketest/key_vault_certificates.py index b9c21a9ffce8..dde2dc898a28 100644 --- a/common/smoketest/key_vault_certificates.py +++ b/common/smoketest/key_vault_certificates.py @@ -2,19 +2,15 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. # ------------------------------------ -import os import uuid from azure.keyvault.certificates import CertificateClient, CertificatePolicy from key_vault_base import KeyVaultBase + class KeyVaultCertificates(KeyVaultBase): def __init__(self): - - credential = self.get_default_credential() - self.certificate_client = CertificateClient( - vault_url=os.environ["AZURE_PROJECT_URL"], credential=credential - ) - + args = self.get_client_args() + self.certificate_client = CertificateClient(**args) self.certificate_name = "cert-name-" + uuid.uuid1().hex def create_certificate(self): diff --git a/common/smoketest/key_vault_certificates_async.py b/common/smoketest/key_vault_certificates_async.py index fae70305e9ce..6c5b5e5e3043 100644 --- a/common/smoketest/key_vault_certificates_async.py +++ b/common/smoketest/key_vault_certificates_async.py @@ -2,19 +2,16 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
# ------------------------------------ -import os import uuid from azure.keyvault.certificates import CertificatePolicy from azure.keyvault.certificates.aio import CertificateClient from key_vault_base_async import KeyVaultBaseAsync + class KeyVaultCertificates(KeyVaultBaseAsync): def __init__(self): - credential = self.get_default_credential() - self.certificate_client = CertificateClient( - vault_url=os.environ["AZURE_PROJECT_URL"], credential=credential - ) - + args = self.get_client_args() + self.certificate_client = CertificateClient(**args) self.certificate_name = "cert-name-" + uuid.uuid1().hex async def create_certificate(self): diff --git a/common/smoketest/key_vault_keys.py b/common/smoketest/key_vault_keys.py index dc712ee82a6f..d7b22af18004 100644 --- a/common/smoketest/key_vault_keys.py +++ b/common/smoketest/key_vault_keys.py @@ -2,7 +2,6 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. # ------------------------------------ -import os import uuid from azure.keyvault.keys import KeyClient from key_vault_base import KeyVaultBase @@ -10,11 +9,8 @@ class KeyVaultKeys(KeyVaultBase): def __init__(self): - credential = self.get_default_credential() - self.key_client = KeyClient( - vault_url=os.environ["AZURE_PROJECT_URL"], credential=credential - ) - + args = self.get_client_args() + self.key_client = KeyClient(**args) self.key_name = "key-name-" + uuid.uuid1().hex def create_rsa_key(self): diff --git a/common/smoketest/key_vault_keys_async.py b/common/smoketest/key_vault_keys_async.py index 071470f2b4f4..d63c056fbe96 100644 --- a/common/smoketest/key_vault_keys_async.py +++ b/common/smoketest/key_vault_keys_async.py @@ -2,7 +2,6 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. # ------------------------------------ -import os import uuid from azure.keyvault.keys.aio import KeyClient from key_vault_base_async import KeyVaultBaseAsync @@ -10,12 +9,8 @@ class KeyVaultKeys(KeyVaultBaseAsync): def __init__(self): - - credential = self.get_default_credential() - self.key_client = KeyClient( - vault_url=os.environ["AZURE_PROJECT_URL"], credential=credential - ) - + args = self.get_client_args() + self.key_client = KeyClient(**args) self.key_name = "key-name-" + uuid.uuid1().hex async def create_rsa_key(self): diff --git a/common/smoketest/key_vault_secrets.py b/common/smoketest/key_vault_secrets.py index 1340b546399a..6751c7ad6b33 100644 --- a/common/smoketest/key_vault_secrets.py +++ b/common/smoketest/key_vault_secrets.py @@ -2,18 +2,15 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. # ------------------------------------ -import os import uuid from azure.keyvault.secrets import SecretClient from key_vault_base import KeyVaultBase + class KeyVaultSecrets(KeyVaultBase): def __init__(self): - credential = self.get_default_credential() - self.secret_client = SecretClient( - vault_url=os.environ["AZURE_PROJECT_URL"], credential=credential - ) - + args = self.get_client_args() + self.secret_client = SecretClient(**args) self.secret_name = "secret-name-" + uuid.uuid1().hex self.secret_Value = "secret-value" diff --git a/common/smoketest/key_vault_secrets_async.py b/common/smoketest/key_vault_secrets_async.py index 807a93d1d198..c054b0a67b8d 100644 --- a/common/smoketest/key_vault_secrets_async.py +++ b/common/smoketest/key_vault_secrets_async.py @@ -2,17 +2,15 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
# ------------------------------------ -import os import uuid from azure.keyvault.secrets.aio import SecretClient from key_vault_base_async import KeyVaultBaseAsync + class KeyVaultSecrets(KeyVaultBaseAsync): def __init__(self): - credential = self.get_default_credential() - self.secret_client = SecretClient( - vault_url=os.environ["AZURE_PROJECT_URL"], credential=credential - ) + args = self.get_client_args() + self.secret_client = SecretClient(**args) self.secret_name = "secret-name-" + uuid.uuid1().hex self.secret_value = "secret-value" diff --git a/common/smoketest/requirements-release.txt b/common/smoketest/requirements-release.txt new file mode 100644 index 000000000000..f32ec3483233 --- /dev/null +++ b/common/smoketest/requirements-release.txt @@ -0,0 +1,8 @@ +azure-core>=0.0.0b1 +azure-identity>=0.0.0b1 +azure-cosmos>=4.0.0b5 +azure-eventhub>=0.0.0b1 +azure-keyvault-certificates>=0.0.0b1 +azure-keyvault-keys>=0.0.0b1 +azure-keyvault-secrets>=0.0.0b1 +azure-storage-blob>=0.0.0b1 diff --git a/common/smoketest/requirements.txt b/common/smoketest/requirements.txt index 54f85cbc5d35..e5c87e9f6f6d 100644 --- a/common/smoketest/requirements.txt +++ b/common/smoketest/requirements.txt @@ -5,4 +5,4 @@ azure-eventhub azure-keyvault-certificates azure-keyvault-keys azure-keyvault-secrets -azure-storage-blob \ No newline at end of file +azure-storage-blob diff --git a/common/smoketest/smoke-test.yml b/common/smoketest/smoke-test.yml index ad7c8ebe6a4c..e64dee5d8e87 100644 --- a/common/smoketest/smoke-test.yml +++ b/common/smoketest/smoke-test.yml @@ -1,117 +1,4 @@ -variables: - InstallAsyncRequirements: true - jobs: - - job: - strategy: - matrix: - Python_27_Linux (AzureCloud): - PythonVersion: '2.7' - InstallAsyncRequirements: false - OSVmImage: ubuntu-18.04 - SubscriptionConfiguration: $(sub-config-azure-cloud-test-resources) - ArmTemplateParameters: $(azureCloudArmParameters) - Python_37_Linux (AzureCloud): - PythonVersion: '3.7' - OSVmImage: ubuntu-18.04 - SubscriptionConfiguration: $(sub-config-azure-cloud-test-resources) - ArmTemplateParameters: $(azureCloudArmParameters) - Python_38_Linux (AzureCloud): - PythonVersion: '3.8' - OSVmImage: ubuntu-18.04 - SubscriptionConfiguration: $(sub-config-azure-cloud-test-resources) - ArmTemplateParameters: $(azureCloudArmParameters) - Python_37_Windows (AzureCloud): - PythonVersion: '3.7' - OSVmImage: windows-2019 - SubscriptionConfiguration: $(sub-config-azure-cloud-test-resources) - ArmTemplateParameters: $(azureCloudArmParameters) - Python_38_Windows (AzureCloud): - PythonVersion: '3.8' - OSVmImage: windows-2019 - SubscriptionConfiguration: $(sub-config-azure-cloud-test-resources) - ArmTemplateParameters: $(azureCloudArmParameters) - Python_37_Mac (AzureCloud): - PythonVersion: '3.7' - OSVmImage: macOS-10.15 - SubscriptionConfiguration: $(sub-config-azure-cloud-test-resources) - ArmTemplateParameters: $(azureCloudArmParameters) - Python_38_Mac (AzureCloud): - PythonVersion: '3.8' - OSVmImage: macOS-10.15 - SubscriptionConfiguration: $(sub-config-azure-cloud-test-resources) - ArmTemplateParameters: $(azureCloudArmParameters) - Python_38_Linux (AzureUSGovernment): - PythonVersion: '3.8' - OSVmImage: ubuntu-18.04 - SubscriptionConfiguration: $(sub-config-gov-test-resources) - ArmTemplateParameters: $(azureUSGovernmentArmParameters) - Python_37_Windows (AzureUSGovernment): - PythonVersion: '3.7' - OSVmImage: windows-2019 - SubscriptionConfiguration: $(sub-config-gov-test-resources) - ArmTemplateParameters: $(azureUSGovernmentArmParameters) - 
Python_38_Linux (AzureChinaCloud): - PythonVersion: '3.8' - OSVmImage: ubuntu-18.04 - SubscriptionConfiguration: $(sub-config-cn-test-resources) - Location: 'chinanorth' - ArmTemplateParameters: $(azureChinaCloudArmParameters) - Python_37_Windows (AzureChinaCloud): - PythonVersion: '3.7' - OSVmImage: windows-2019 - SubscriptionConfiguration: $(sub-config-cn-test-resources) - Location: 'chinanorth' - ArmTemplateParameters: $(azureChinaCloudArmParameters) - - pool: - vmImage: $(OSVmImage) - - variables: - Location: '' - azureCloudArmParameters: "@{ storageEndpointSuffix = 'core.windows.net'; azureCloud = 'AzureCloud'; }" - azureUSGovernmentArmParameters: "@{ storageEndpointSuffix = 'core.usgovcloudapi.net'; azureCloud = 'AzureUSGovernment'; }" - azureChinaCloudArmParameters: "@{ storageEndpointSuffix = 'core.chinacloudapi.cn'; azureCloud = 'AzureChinaCloud'; }" - - steps: - - task: UsePythonVersion@0 - displayName: "Use Python $(PythonVersion)" - inputs: - versionSpec: $(PythonVersion) - - - script: | - python -m pip install pip==20.0.2 - pip --version - displayName: pip --version - - - script: pip install -r ./common/smoketest/requirements.txt --pre --no-deps --index-url https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-python/pypi/simple - displayName: Install requirements from dev feed without dependencies - - - script: pip install -r ./common/smoketest/requirements_async.txt - displayName: "Install requirements_async.txt" - condition: and(succeeded(), eq(variables['InstallAsyncRequirements'], 'true')) - - - script: python ./common/smoketest/dependencies.py -r ./common/smoketest/requirements.txt | tee ./common/smoketest/requirements_dependencies.txt - displayName: Create dependency list from installed dev packages - - - script: pip install -r ./common/smoketest/requirements_dependencies.txt - displayName: Install dev package dependencies from PyPI - - - script: pip freeze - displayName: Show installed packages (pip freeze) - - - template: /eng/common/TestResources/deploy-test-resources.yml - parameters: - ServiceDirectory: '$(Build.SourcesDirectory)/common/smoketest/' - SubscriptionConfiguration: $(SubscriptionConfiguration) - Location: $(Location) - ArmTemplateParameters: $(ArmTemplateParameters) - - - script: python ./common/smoketest/program.py - displayName: Run Smoke Test - - - template: /eng/common/TestResources/remove-test-resources.yml - parameters: - ServiceDirectory: '$(Build.SourcesDirectory)/common/smoketest/' - SubscriptionConfiguration: $(SubscriptionConfiguration) - + - template: /eng/pipelines/templates/jobs/smoke-test.yml + parameters: + Daily: true diff --git a/doc/README.md b/doc/README.md index 0756c6aecdfe..236b1b83b742 100644 --- a/doc/README.md +++ b/doc/README.md @@ -1,5 +1,7 @@ This folder contains some documentations for this repository: The folder structure is the following -- [sphinx](./sphinx) : contains the documentation source code for https://azure.github.io/azure-sdk-for-python/ -- [dev](./dev) : contains advanced documentation for _developers_ of SDK (not _consumers_ of SDK) \ No newline at end of file +- [sphinx](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/sphinx) : contains the documentation source code for https://azure.github.io/azure-sdk-for-python/ +- [dev](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev) : contains advanced documentation for _developers_ of SDK (not _consumers_ of SDK) + +The file 
[eng_sys_checks](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/eng_sys_checks.md) is a read up as to what a standard `ci.yml` will actually execute. diff --git a/doc/dev/README.md b/doc/dev/README.md index 9afddd10aa13..dcd202bd93d3 100644 --- a/doc/dev/README.md +++ b/doc/dev/README.md @@ -3,8 +3,8 @@ This folder contains documentation for developers of SDK: internal teams at Microsoft, or advanced contributors. Overview of the documents: -- [dev setup](./dev_setup.md) : How to create a development environment for this repo -- [release](./release.md) : How to release a package when ready -- [packaging.md](./packaging.md) : How to organize packaging information for packages under `azure` +- [dev setup](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/dev_setup.md) : How to create a development environment for this repo +- [release](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/release.md) : How to release a package when ready +- [packaging.md](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/packaging.md) : How to organize packaging information for packages under `azure` -The [mgmt](./mgmt) folder contains information specific to management packages (i.e. packages prefixed by `azure-mgmt`) \ No newline at end of file +The [mgmt](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/mgmt) folder contains information specific to management packages (i.e. packages prefixed by `azure-mgmt`) diff --git a/doc/dev/dev_setup.md b/doc/dev/dev_setup.md index 610ca428c404..f2dca9198181 100644 --- a/doc/dev/dev_setup.md +++ b/doc/dev/dev_setup.md @@ -7,20 +7,21 @@ or execute the various commands available in the toolbox. 1. If you don't already have it, install Python: - - Windows: https://www.python.org/downloads/ or from the windows store https://www.microsoft.com/p/python-37/9nj46sx7x90p + - Windows: [Python website][python_website] or from the [Windows store][python_39] - Ubuntu/Debian `sudo apt-get install python3` - RHEL/CentOS `sudo yum install python3` Python is also available in Bash for Windows natively. -3. Clone the repository and go to the folder +2. Clone the repository and go to the folder ``` git clone https://github.com/Azure/azure-sdk-for-python.git cd azure-sdk-for-python ``` -2. Create a [virtual environment](https://docs.python.org/3/tutorial/venv.html) +3. Create a [virtual environment][virtual_environment] + You can initialize a virtual environment this way: ``` @@ -30,14 +31,25 @@ or execute the various commands available in the toolbox. ./env/scripts/activate.bat # Windows CMD only ``` -4. Setup your env (installing dependencies, etc.) +4. Setup your development environment + Install the development requirements for a specific library (located in the `dev_requirements.txt` file at the root of the library), [Tox][tox], [Tox monorepo][tox_monorepo] and an editable install of your library: ``` - python scripts/dev_setup.py + azure-sdk-for-python> cd sdk/formrecognizer/azure-ai-formrecognizer + azure-sdk-for-python/sdk/formrecognizer/azure-ai-formrecognizer> pip install -r dev_requirements.txt + azure-sdk-for-python/sdk/formrecognizer/azure-ai-formrecognizer> pip install tox tox-monorepo + azure-sdk-for-python/sdk/formrecognizer/azure-ai-formrecognizer> pip install -e . ``` - If you work on a particular package, you can setup only this package to save some installation time: +5. Create a .env file to store your secrets. 
- ``` - python scripts/dev_setup.py -p azure-mgmt-service - ``` + The recommended place to store your .env file is one directory higher than the `azure-sdk-for-python` location. + This ensures the secrets will be loaded by the interpreter and most importantly not be committed to Git history. + + + +[python_website]: https://www.python.org/downloads/ +[python_39]: https://www.microsoft.com/p/python-39/9p7qfqmjrfp7 +[virtual_environment]: https://docs.python.org/3/tutorial/venv.html +[tox]: https://tox.readthedocs.io/en/latest/ +[tox_monorepo]: https://pypi.org/project/tox-monorepo/ \ No newline at end of file diff --git a/doc/dev/mgmt/README.md b/doc/dev/mgmt/README.md index bd7c129c274c..34a1bfb2bfb2 100644 --- a/doc/dev/mgmt/README.md +++ b/doc/dev/mgmt/README.md @@ -2,13 +2,13 @@ The documentation in that folder is intended for developers at Microsoft, or for advanced contributors, in the ARM (Azure Resource Management) world. -The [cheat sheet](./cheatsheet.md) page contains a quick reminder of the basic commands if you are already familiar with this repo. +The [cheat sheet](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/mgmt/cheatsheet.md) page contains a quick reminder of the basic commands if you are already familiar with this repo. In order of workflow: -- [swagger_conf.md](./swagger_conf.md) : Describe how to configure the different Readme on https://github.com/Azure/azure-rest-api-specs for Python -- [tests.md](./tests.md) : How to test management SDK (recordings, etc.) -- [mgmt_release.md](./mgmt_release.md) : How to finish preapring the package, changelog, version, etc. +- [swagger_conf.md](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/mgmt/swagger_conf.md) : Describe how to configure the different Readme on https://github.com/Azure/azure-rest-api-specs for Python +- [tests.md](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/mgmt/tests.md) : How to test management SDK (recordings, etc.) +- [mgmt_release.md](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/mgmt/mgmt_release.md) : How to finish preapring the package, changelog, version, etc. 
More implementations / advanced documents: -- [changelog_impl.md](./changelog_impl.md) : Technical details about the change log tool implementation +- [changelog_impl.md](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/mgmt/changelog_impl.md) : Technical details about the change log tool implementation diff --git a/doc/dev/mgmt/generating-integration-test.md b/doc/dev/mgmt/generating-integration-test.md index 98de9892d535..af07d85ebb3e 100644 --- a/doc/dev/mgmt/generating-integration-test.md +++ b/doc/dev/mgmt/generating-integration-test.md @@ -123,7 +123,7 @@ now you can run live integration test: pytest -s sdk/attestation/azure-mgmt-attestation ->NOTE: To create service principal, follow instructions here: https://docs.microsoft.com/en-us/azure/active-directory/develop/howto-create-service-principal-portal +>NOTE: To create service principal, follow instructions here: https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal ## Fixing Test diff --git a/doc/dev/mgmt/generation.md b/doc/dev/mgmt/generation.md index f5fec80c2d02..84fbab3b8dad 100644 --- a/doc/dev/mgmt/generation.md +++ b/doc/dev/mgmt/generation.md @@ -1,8 +1,8 @@ # Generation of SDK -Assuming your Swagger are associated with correct Readmes (otherwise see previous chapter [Swagger conf](./swagger_conf.md)), this page explains how to generate your packages. +Assuming your Swagger are associated with correct Readmes (otherwise see previous chapter [Swagger conf](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/mgmt/swagger_conf.md)), this page explains how to generate your packages. -IMPORTANT NOTE: All the commands prefixed by `python` in this page assumes you have loaded the [dev_setup](../dev_setup.md) in your currently loaded virtual environment. +IMPORTANT NOTE: All the commands prefixed by `python` in this page assumes you have loaded the [dev_setup](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/dev_setup.md) in your currently loaded virtual environment. ## Building the code diff --git a/doc/dev/mgmt/mgmt_release.md b/doc/dev/mgmt/mgmt_release.md index c515b38d2f65..abefe8dc583e 100644 --- a/doc/dev/mgmt/mgmt_release.md +++ b/doc/dev/mgmt/mgmt_release.md @@ -16,7 +16,23 @@ Once Swagger PR is merged, SDK automation will create pull request to Azure SDK Once you have a PR that contains accurate with correct tests (or no tests at all, but CI is green), this page explains how to prepare for a release. -IMPORTANT NOTE: All the commands in this page assumes you have loaded the [dev_setup](../dev_setup.md) in your currently loaded virtual environment. +IMPORTANT NOTE: All the commands in this page assumes you have loaded the [dev_setup](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/dev_setup.md) in your currently loaded virtual environment. + +## Manual generation + +If the automation is not doing its job to create an auto PR, Python has a SwaggerToSdk CLI that can be used to generate a specific Readme. You need +a virtual environment loaded with at least `tools/azure-sdk-tools` installed. 
+ +```shell +# Using default configuration (this can be a Github raw link) +generate_sdk -v -m ..\azure-rest-api-specs\specification\compute\resource-manager\readme.md + +# Forcing Track1 generation +generate_sdk -v -c eng\swagger_to_sdk_config_v4.json -m ..\azure-rest-api-specs\specification\cognitiveservices\data-plane\Face\readme.md + +# For more details about the available options +generate_sdk --help +``` ## Building the packaging information @@ -142,4 +158,4 @@ Python SDK _strictly_ follows [semver](https://semver.org/). A few notes: - Next stable breaking is 3.0.0 - Next preview feature is 2.2.0rc1 - Next stable feature is 2.2.0 - - Next patch is 2.1.1 \ No newline at end of file + - Next patch is 2.1.1 diff --git a/doc/dev/mgmt/multiapi.md b/doc/dev/mgmt/multiapi.md index 4792c9a28baa..1fd0127e3bb3 100644 --- a/doc/dev/mgmt/multiapi.md +++ b/doc/dev/mgmt/multiapi.md @@ -14,7 +14,7 @@ Because there is different flavors of Azure that are not necessarly provided wit ### Why a multi-api package? -Indeed, a simple solution would be to write down explictly what version of SDK supports what API version. Example: 1.0 supports 2015-06-01, 2.0 supports 2017-07-01, etc. The story for customers then would be to pin the specific SDK version for the specific API version they need. However, this was considered unacceptable in an end-to-end scenario: +Indeed, a simple solution would be to write down explicitly what version of SDK supports what API version. Example: 1.0 supports 2015-06-01, 2.0 supports 2017-07-01, etc. The story for customers then would be to pin the specific SDK version for the specific API version they need. However, this was considered unacceptable in an end-to-end scenario: - It means you cannot install in the same Python environment packages that would target different cloud (Python doesn't allow installation of different versions of the same package together). Azure CLI or Ansible supports for different clouds would then be extremely complicated. - This forces customers to use old SDK, that might have been fixed on different axis than API version (security fixes, new SDK features like async, etc.) - Customers rarely needs only one package, but a set of them (storage, compute, network, etc.) and having to keep track of the correct list of packages is challenging. @@ -44,7 +44,7 @@ Network interfaces operations are defines in a [network interface file](https:// **Python multi-api packaging is based on the assumptions that it's true.** If it's not, it's usually ok but requires a little more subtle packaging (see final section here) -Being that a given Swagger defines only *one* fixed API version, doing multi-api version in one package implies shipping several Swagger files into one package. This is achived by the `batch` directive of Autorest. More details on how to write Readme for Swagger in the specific page for it [swagger_conf.md](./swagger_conf.md). +Being that a given Swagger defines only *one* fixed API version, doing multi-api version in one package implies shipping several Swagger files into one package. This is achieved by the `batch` directive of Autorest. More details on how to write Readme for Swagger in the specific page for it [swagger_conf.md](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/mgmt/swagger_conf.md). Python SDK team is responsible to design the correct set of tags to set for the `batch` node. Each line of the batch directive should contains only *one* api version to match the folder name used.
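For illustration only, a sketch of what such a `batch` section can look like in a service's `readme.python.md`; the tag names here are hypothetical examples following the `-only` convention discussed in this paragraph, and the real tags must come from the service's own readme:

```yaml
batch:
  - tag: package-2019-03-01-only
  - tag: package-2018-10-01-only
```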
this might require adding new tags in the readme.md that are specific to only one API version. These tags are usually suffixed by "-only" ([example with compute](https://github.com/Azure/azure-rest-api-specs/tree/master/specification/compute/resource-manager#tag-package-2019-03-01-only)) @@ -119,4 +119,4 @@ If this is not the same API version, then we need to bend the rules a little: we Current implementation assumes operation group are unique, and as discussed it's not always the case. Also, this limitation has impact on intellisense right now. Example, if a user types `compute_client.virtual_machines.` and hit the intellisense shortcut, users won't see any suggestions. It's because the `virtual_machines` property is dynamic and can change depending of dynamic configuration. -To improve intellisense and allow operation level profile, the concept would be to make the [operation mixin multi-api concept](https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/network/azure-mgmt-network/azure/mgmt/network/_operations_mixin.py) applicable to operation groups as well. I estimate this work to a week of dev-ish. \ No newline at end of file +To improve intellisense and allow operation level profile, the concept would be to make the [operation mixin multi-api concept](https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/network/azure-mgmt-network/azure/mgmt/network/_operations_mixin.py) applicable to operation groups as well. I estimate this work to a week of dev-ish. diff --git a/doc/dev/mgmt/swagger/multi_api/readme.md b/doc/dev/mgmt/swagger/multi_api/readme.md index 6fcc480c6da6..bdc5029d5929 100644 --- a/doc/dev/mgmt/swagger/multi_api/readme.md +++ b/doc/dev/mgmt/swagger/multi_api/readme.md @@ -8,7 +8,7 @@ This is the AutoRest configuration file for Network. ## Getting Started -To build the SDK for Network, simply [Install AutoRest](https://github.com/Azure/autorest/blob/master/docs/installing-autorest.md) and in this folder, run: +To build the SDK for Network, simply [Install AutoRest](https://github.com/Azure/autorest/blob/master/docs/install/readme.md) and in this folder, run: > `autorest` diff --git a/doc/dev/mgmt/swagger/single_api/readme.md b/doc/dev/mgmt/swagger/single_api/readme.md index 233f19b4173e..8c33429784a2 100644 --- a/doc/dev/mgmt/swagger/single_api/readme.md +++ b/doc/dev/mgmt/swagger/single_api/readme.md @@ -8,7 +8,7 @@ This is the AutoRest configuration file for Cdn. --- ## Getting Started -To build the SDK for Cdn, simply [Install AutoRest](https://github.com/Azure/autorest/blob/master/docs/installing-autorest.md) and in this folder, run: +To build the SDK for Cdn, simply [Install AutoRest](https://github.com/Azure/autorest/blob/master/docs/install/readme.md) and in this folder, run: > `autorest` @@ -66,4 +66,4 @@ swagger-to-sdk: ## Python -See configuration in [readme.python.md](./readme.python.md) +See configuration in [readme.python.md](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/mgmt/swagger/single_api/readme.python.md) diff --git a/doc/dev/mgmt/swagger_conf.md b/doc/dev/mgmt/swagger_conf.md index b026757b1306..a91ec28f8aac 100644 --- a/doc/dev/mgmt/swagger_conf.md +++ b/doc/dev/mgmt/swagger_conf.md @@ -10,7 +10,7 @@ Nowadays, swagger files are actually not the input for Autorest, the input is _r autorest readme.md --python ``` -In practical terms, we want to control the version of Autorest used, the output folder, etc. but this article will focus on Swagger and Readme. 
For more details about generation, see the [generation page](./generation.md). +In practical terms, we want to control the version of Autorest used, the output folder, etc. but this article will focus on Swagger and Readme. For more details about generation, see the [generation page](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/mgmt/generation.md). ## Writing the readme diff --git a/doc/dev/mgmt/tests.md b/doc/dev/mgmt/tests.md index 8816ed5056d9..a96160a1e779 100644 --- a/doc/dev/mgmt/tests.md +++ b/doc/dev/mgmt/tests.md @@ -5,188 +5,145 @@ - [Setting up a test environment](#setting-up-a-test-environment) - [Running the tests](#running-the-tests) - [Getting Azure credentials](#getting-azure-credentials) - - [Get a token with Azure Active Directory user/password](#get-a-token-with-azure-active-directory-userpassword) - [Get a token with Active Directory application and service principal](#get-a-token-with-active-directory-application-and-service-principal) - [Providing credentials to the tests](#providing-credentials-to-the-tests) - [Running tests in live mode](#running-tests-in-live-mode) -- [Using the Azure Python SDK test framework](#using-the-azure-python-sdk-test-framework) +- [Using the Azure Python SDK test framework](#writing-new-tests) -IMPORTANT NOTE: All the commands in this page assumes you have loaded the [dev_setup](../dev_setup.md) in your currently loaded virtual environment. +IMPORTANT NOTE: All the commands in this page assumes you have loaded the [dev_setup][dev_setup] in your currently loaded virtual environment. # Overview -This page is to help you write tests for Azure Python SDK when these tests require Azure HTTP requests. -The Azure SDK test framework uses the [`azure-devtools`](https://pypi.python.org/pypi/azure-devtools) package, -which in turn rests upon on a HTTP recording system ([vcrpy](https://pypi.python.org/pypi/vcrpy)) -that enables tests depending on network interaction -to be run offline. +This page is to help you write tests for Azure Python SDK when these tests require Azure HTTP requests. The Azure SDK test framework uses the [`azure-devtools`][azure_devtools] package, which in turn rests upon on a HTTP recording system ([vcrpy][vcrpy]) that enables tests dependent on network interaction to be run offline. In this document, we will describe: -- How to run the tests offline, using previously recorded HTTP interactions, - or online, by authenticating with Azure to record new HTTP interactions -- How to write new tests using our utility classes +- [How to run the tests online (by authenticating with Azure to record new HTTP interactions)](#running-tests-in-live-mode) +- [How to run the tests offline (using previously recorded HTTP interactions)](#running-tests-in-playback-mode) +- [How to write new tests using our utility classes](#writing-new-tests) # Getting the tests to run -This section describes how to run the SDK tests, -by installing dependencies into a virtual environment -and using a helper script and the [pytest](https://docs.pytest.org/en/latest/) test runner -to choose which tests to run. +This section describes how to run the SDK tests, by installing dependencies into a virtual environment and using a helper script and the [pytest][pytest] test runner to choose which tests to run. ## Running the tests -Azure SDK tests use [pytest](https://docs.pytest.org/en/latest/) test runner. -To run all the tests, you can use the following command: +In this section, we will be running the tests in `azure-mgmt-storage`. 
Azure SDK tests use [pytest][pytest] test runner. To run all the tests, you can use the following command: ```Shell -pytest +sdk/storage/azure-mgmt-storage> pytest ``` -You can provide directories or individual files as positional arguments -to specify particular tests to run -rather than running the entire test suite. For example: +You can provide directories or individual files as positional arguments to specify particular tests to run rather than running the entire test suite. For example: ```Shell -pytest -s sdk/storage/azure-mgmt-storage/ -pytest sdk/storage/azure-mgmt-storage/tests/test_mgmt_storage.py +sdk/storage/azure-mgmt-storage> pytest +sdk/storage/azure-mgmt-storage> pytest tests/test_mgmt_storage.py ``` -By default, tests run in playback mode, -using recordings of HTTP interactions to simulate -requests made against Azure and allow the tests to run offline. -To run the tests in live (or "recording") mode, -you'll need to set up token-based Azure authentication. +If you have print statements in your tests for debugging you can add the `-s` flag to send those print statements to standard output: +```Shell +sdk/storage/azure-mgmt-storage> pytest -s +``` ## Getting Azure credentials -There are several ways to authenticate to Azure, -but to be able to record test HTTP interactions, -you must use an OAuth authentication method which gives you a token: -- Azure Active Directory user/password +There are several ways to authenticate to Azure, but to be able to record test HTTP interactions, you must use an OAuth authentication method which gives you a token: - Active Directory application and service principal -Certificate authentication does not allow you to record HTTP queries for testing. - -### Get a token with Azure Active Directory user/password. This is considered deprecated and should not be used anymore (https://docs.microsoft.com/en-us/python/azure/python-sdk-azure-authenticate?view=azure-python#mgmt-auth-legacy). - -1. Connect to the [Azure Classic Portal](https://manage.windowsazure.com/) with your admin account. -2. Create a user in your default AAD https://azure.microsoft.com/en-us/documentation/articles/active-directory-create-users/ - **You must NOT activate Multi-Factor Authentication!** -3. Go to Settings - Administrators. -4. Click on *Add* and enter the email of the new user. - Check the checkbox of the subscription you want to test with this user. -5. Login to Azure Portal with this new user to change the temporary password to a new one. - You will not be able to use the temporary password for OAuth login. - -You are now able to log in Python using OAuth. You can test with this code: -```python -from azure.common.credentials import UserPassCredentials -credentials = UserPassCredentials( - 'user@domain.com', # Your new user - 'my_smart_password', # Your password -) -``` - ### Get a token with Active Directory application and service principal Follow this detailed tutorial to set up an Active Directory application and service principal: -https://azure.microsoft.com/en-us/documentation/articles/resource-group-create-service-principal-portal/ +https://azure.microsoft.com/documentation/articles/resource-group-create-service-principal-portal/ -To use the credentials from Python, -you need the application ID (a.k.a. client ID), -authentication key (a.k.a. client secret), -tenant ID and subscription ID from the Azure portal for use in the next step. 
-[This section of the above tutorial](https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-group-create-service-principal-portal#get-application-id-and-authentication-key) -describes where to find them -(besides the subscription ID, -which is in the "Overview" section of the "Subscriptions" blade.) +To use the credentials from Python, you need: +* Application ID (a.k.a. client ID) +* Authentication key (a.k.a. client secret) +* Tenant ID +* Subscription ID from the Azure portal +[This section of the above tutorial](https://docs.microsoft.com/azure/azure-resource-manager/resource-group-create-service-principal-portal#get-application-id-and-authentication-key) describes where to find them (besides the subscription ID, which is in the "Overview" section of the "Subscriptions" blade.) + +The recommended practice is to store these three values in environment variables called `AZURE_TENANT_ID`, `AZURE_CLIENT_ID`, and `AZURE_CLIENT_SECRET`. To set an environment variable use the following commands: +```Shell +$env:AZURE_TENANT_ID='' # PowerShell only +set AZURE_TENANT_ID= # Windows CMD (alternatively, use export AZURE_TENANT_ID= to export to the global env) +export AZURE_TENANT_ID= # Linux shell only +``` +*** Note: when setting these variables, do not wrap the value in quotation marks *** You are now able to log in from Python using OAuth. You can test with this code: ```python +import os from azure.common.credentials import ServicePrincipalCredentials credentials = ServicePrincipalCredentials( - client_id = 'ABCDEFGH-1234-ABCD-1234-ABCDEFGHIJKL', - secret = 'XXXXXXXXXXXXXXXXXXXXXXXX', - tenant = 'ABCDEFGH-1234-ABCD-1234-ABCDEFGHIJKL' + client_id = os.environ['AZURE_CLIENT_ID'], + secret = os.environ['AZURE_CLIENT_SECRET'], + tenant = os.environ['AZURE_TENANT_ID'] ) ``` ## Providing credentials to the tests -When you run tests in playback mode, -they use a fake credentials file, -located at `tools/azure-sdk-tools/devtools_testutils/mgmt_settings_fake.py`, -to simulate authenticating with Azure. +When you run tests in playback mode, they use a fake credentials file, located at [`tools/azure-sdk-tools/devtools_testutils/mgmt_settings_fake.py`][mgmt_settings_fake], to simulate authenticating with Azure. In most scenarios you will not have to adjust this file, you will have to make edits to this file if your service uses values that are not already included in the `mgmt_settings_fake.py` file. -In live mode, you need to use actual credentials -like those you obtained in the previous section. -To enable the tests to use them, -make a copy of the `mgmt_settings_fake.py` file in the same location, -and rename it `mgmt_settings_real.py`. +In live mode, you need to use real credentials like those you obtained in the previous section. To enable the tests to use them, make a copy of the `mgmt_settings_fake.py` file in the same location, and rename it `mgmt_settings_real.py`. Then make the following changes: -* Change the value of the `SUBSCRIPTION_ID` constant to your subscription ID. - (If you don't have it, - you can find it in the "Overview" section of the "Subscriptions" blade - in the [Azure portal](https://portal.azure.com/).) -* Change the `get_credentials()` function to construct and return - a `UserPassCredentials` or `ServicePrincipalCredentials` object - such as you constructed in the samples in the previous section. - (Don't forget to make sure the necessary imports are present as well!) +* Change the value of the `SUBSCRIPTION_ID` constant to your subscription ID. 
(If you don't have it, you can find it in the "Overview" section of the "Subscriptions" blade in the [Azure portal][azure_portal].) +* Change the `get_credentials()` function to construct and return a `UserPassCredentials` (Don't forget to make sure the necessary imports are present as well!). +```python +def get_credentials(**kwargs): + import os + from azure.common.credentials import ServicePrincipalCredentials + + return ServicePrincipalCredentials( + client_id = os.environ['AZURE_CLIENT_ID'], + secret = os.environ['AZURE_CLIENT_SECRET'], + tenant = os.environ['AZURE_TENANT_ID'] + ) +``` +* Change the `get_azure_core_credentials()` function to construct and return a `ClientSecretCredential`: +```python +def get_azure_core_credentials(**kwargs): + from azure.identity import ClientSecretCredential + import os + return ClientSecretCredential( + client_id = os.environ['AZURE_CLIENT_ID'], + client_secret = os.environ['AZURE_CLIENT_SECRET'], + tenant_id = os.environ['AZURE_TENANT_ID'] + ) +``` +These two methods are used by the authentication methods within `AzureTestCase` to provide the correct credential for your client class, you do not need to call these methods directly. Authenticating clients will be discussed further in the [examples](#writing-management-plane-test) section. **Important: `mgmt_settings_real.py` should not be committed since it contains your actual credentials! To prevent this, it is included in `.gitignore`.** ## Running tests in live mode To configure the tests to run in live mode, you have two options: - -* Set the environment variable `AZURE_TEST_RUN_LIVE` to "true" or "yes". - If you want to go back to playback mode you can either unset it entirely - or set it to "false" or "no". -* Create a `testsettings_local.cfg` file in the same directory as - `mgmt_settings_real.py`. It should look like the following: +* Set the environment variable `AZURE_TEST_RUN_LIVE` to "true" or "yes". +* Create the `tools/azure-sdk-tools/devtools_testutils/testsettings_local.cfg` file and copy and paste the following line: ``` live-mode: true ``` - To go back to playback mode using the config file, - change the "true" to "false" or delete the file. - (`testsettings_local.cfg` is listed in `.gitignore` - and not present in the repo; if it's missing, - the tests default to playback mode.) - -Now you can run tests using the same method described in -[Running the tests](#running-the-tests). -You would be well-advised to specify a limited number of tests to run. -Running every existing test in live mode will take a very long time -and produce a great deal of changes to recording files in your Git repository. +(`testsettings_local.cfg` is listed in `.gitignore` and not present in the repo; if it's missing, the tests default to playback mode.) -# Writing new tests +Now you can run tests using the same method described in [Running the tests](#running-the-tests). You would be well-advised to specify a limited number of tests to run. Running every existing test in live mode will take a very long time and produce a great deal of changes to recording files in your Git repository. However, for changes in the client code, the recordings will need to be committed to the Git repository. -SDK tests are based on the `scenario_tests` subpackage of the -[`azure-devtools`](https://pypi.python.org/pypi/azure-devtools) package. 
-`scenario_tests` is a general, mostly abstract framework -providing several features useful for the SDK tests, for example: +## Running tests in playback mode +Now that the tests have been run against live resources and generated the HTTP recordings, you can run your tests in playback mode. There are two options for changing from live mode to playback mode: +* Set the environment variable `AZURE_TEST_RUN_LIVE` to "false" or "no". +* Change the `tools/azure-sdk-tools/devtools_testutils/testsettings_local.cfg` file to: + ``` + live-mode: false + ``` -* HTTP interaction recording and playback using [vcrpy](https://pypi.python.org/pypi/vcrpy) -* Creation and cleanup of helper resources, such as resource groups, - for tests that need them in order to test other services -* Processors for modifying requests and responses when writing or reading recordings - (for example, to avoid recording credential information) -* Patches for overriding functions and methods that don't work well with tests - (such as long-running operations) +# Writing new tests -Code in the [`azure-sdk-tools/devtools_testutils`](tools/azure-sdk-tools/devtools_testutils) directory -provides concrete implementations of the features provided in `scenario_tests` -that are oriented around use in SDK testing -and that you can use directly in your unit tests. +Code in the [`azure-sdk-tools/devtools_testutils`][devtools_testutils] directory provides concrete implementations of the features provided in `scenario_tests` that are oriented around use in SDK testing and that you can use directly in your unit tests. ## Test structure -New tests should be located alongside the packages containing the code they test. -For example, the tests for `azure-mgmt-media` are in `azure-mgmt-media/tests`. -Each test folder also has a `recordings` subfolder containing one .yaml recording file -for each test that has HTTP interactions to record. +New tests should be located alongside the packages containing the code they test. For example, the tests for `azure-mgmt-media` are in `azure-mgmt-media/tests`. There are also legacy tests in the following three locations: @@ -195,11 +152,9 @@ There are also legacy tests in the following three locations: For more information about legacy tests, see [Legacy tests](https://github.com/Azure/azure-sdk-for-python/wiki/Legacy-tests). -## Using the Azure Python SDK test framework +## Writing management plane test -This section will demonstrate writing tests using `devtools_testutils` -with a few increasingly sophisticated examples -to show how to use some of the features of the underlying test frameworks. +Management plane SDKs are those that are formatted `azure-mgmt-xxxx`, otherwise the SDK is data plane. Management plane SDKs work against the [Azure Resource Manager APIs][arm_apis], while the data plane SDKs will work against service APIs. This section will demonstrate writing tests using `devtools_testutils` with a few increasingly sophisticated examples to show how to use some of the features of the underlying test frameworks. ### Example 1: Basic Azure service interaction and recording @@ -222,35 +177,13 @@ class ExampleResourceGroupTestCase(AzureMgmtTestCase): self.client.resource_groups.delete(group.name).wait() ``` -This simple test creates a resource group and checks that its name -is assigned correctly. +This simple test creates a resource group and checks that its name is assigned correctly. Notes: - -1. 
This test inherits all necessary behavior for HTTP recording and playback - described previously in this document - from its `AzureMgmtTestCase` superclass. - You don't need to do anything special to implement it. -2. The `get_resource_name()` helper method of `AzureMgmtTestCase` - creates a pseudorandom name based on the parameter - and the names of the test file and method. - This ensures that the name generated is the same for each run - of the same test, thereby ensuring reproducability, - but prevents name collisions if the tests are run live - and the same parameter is used from several different tests. -3. The `create_mgmt_client()` helper method of `AzureMgmtTestCase` - creates a client object using the credentials - from `mgmt_settings_fake.py` or `mgmt_settings_real.py` as appropriate, - with some checks to make sure it's created successfully - and cause the unit test to fail if not. - You should use it for any clients you create. -4. Note that this test cleans up the resource group it creates! - If you create resources yourself as part of the test, - make sure to delete them afterwards. - But if you need something like a resource group - as a prerequisite for what you're actually trying to test, - you should use a "preparer" as demonstrated in the following two examples. - Preparers will create and clean up helper resources for you. +1. This test inherits all necessary behavior for HTTP recording and playback described previously in this document from its `AzureMgmtTestCase` superclass. You don't need to do anything special to implement it. +2. The `get_resource_name()` helper method of `AzureMgmtTestCase` creates a pseudorandom name based on the parameter and the names of the test file and method. This ensures that the name generated is the same for each run of the same test, ensuring reproducability and preventing name collisions if the tests are run live and the same parameter is used from several different tests. +3. The `create_mgmt_client()` helper method of `AzureMgmtTestCase` creates a client object using the credentials from `mgmt_settings_fake.py` or `mgmt_settings_real.py` as appropriate, with some checks to make sure it's created successfully and cause the unit test to fail if not. You should use it for any clients you create. +4. While the test cleans up the resource group it creates, you will need to manually delete any resources you've created independent of the test framework. But if you need something like a resource group as a prerequisite for what you're actually trying to test, you should use a "preparer" as demonstrated in the following two examples. Preparers will create and clean up helper resources for you. ### Example 2: Basic preparer usage @@ -281,44 +214,21 @@ class ExampleSqlServerTestCase(AzureMgmtTestCase): self.assertEqual(server.name, test_server_name) ``` -This test creates a SQL server and confirms that its name is set correctly. -Because a SQL server must be created in a resource group, -the test uses a `ResourceGroupPreparer` to create a group for use in the test. - -Preparers are [decorators](https://www.python.org/dev/peps/pep-0318/) -that "wrap" a test method, -transparently replacing it with another function that has some additional functionality -before and after it's run. 
-For example, the `@ResourceGroupPreparer` decorator adds the following to the wrapped method: -* creates a resource group -* inspects the argument signature of the wrapped method - and passes in information about the created resource group - if appropriately-named parameters - (here, `resource_group` and `location`) are present -* deletes the resource group after the test is run +This test creates a SQL server and confirms that its name is set correctly. Because a SQL server must be created in a resource group, the test uses a `ResourceGroupPreparer` to create a group for use in the test. + +Preparers are [decorators][decorators] that "wrap" a test method, transparently replacing it with another function that has some additional functionality before and after it's run. For example, the `@ResourceGroupPreparer` decorator adds the following to the wrapped method: +* creates a resource group +* inspects the argument signature of the wrapped method and passes in information about the created resource group if appropriately-named parameters (here, `resource_group` and `location`) are present +* deletes the resource group after the test is run Notes: +1. HTTP interactions undertaken by preparers to create and delete the prepared resource are not recorded or played back, as they're not part of what the test is testing. +2. If the test is run in playback mode, the `resource_group` parameter will be a simple `FakeResource` object with a pseudorandom `name` attribute and a blank `id` attribute. If you need a more sophisticated fake object, see the next example. +3. Why not use a preparer in Example 1, above? -1. HTTP interactions undertaken by preparers - to create and delete the prepared resource - are not recorded or played back, - as they're not part of what the test is testing. -2. If the test is run in playback mode, - the `resource_group` parameter will be a simple `FakeResource` object - with a pseudorandom `name` attribute and a blank `id` attribute. - If you need a more sophisticated fake object, see the next example. -3. Why not use a preparer in Example 1, above? - - Preparers are only for *auxiliary* resources - that aren't part of the main focus of the test. - In example 1, we want to test the actual creation and naming - of the resource group, so those operations are part of the test. - By contrast, in example 2, the subject of the test - is the SQL server management operations; - the resource group is just a prerequisite for those operations. - We only want this test to fail if something is wrong with the SQL server creation. - If there's something wrong with the resource group creation, - there should be a dedicated test for that. + Preparers are only for *auxiliary* resources that aren't part of the main focus of the test. In example 1, we want to test the actual creation and naming of the resource group, so those operations are part of the test. + By contrast, in example 2, the subject of the test is the SQL server management operations; the resource group is just a prerequisite for those operations. We only want this test to fail if something is wrong with the SQL server creation. + If there's something wrong with the resource group creation, there should be a dedicated test for that. ### Example 3: More complicated preparer usage @@ -361,35 +271,10 @@ This test creates a media service and confirms that its name is set correctly. Notes: -1. Here, we want to test creation of a media service, - which requires a storage account. 
- We want to use a preparer for this, - but creation of a storage account itself needs a resource group. - So we need both a `ResourceGroupPreparer` and a `StorageAccountPreparer`, - in that order. -2. Both preparers are customized. - We pass a `parameter_name` keyword argument of `group` to `ResourceGroupPreparer`, - and as a result the resource group is passed into the test method - through the `group` parameter (rather than the default `resource_group`). - Then, because `StorageAccountPreparer` needs a resource group, - we need to let it know about the modified parameter name. - We do so with the `resource_group_parameter_name` argument. - Finally, we pass a `name_prefix` to `StorageAccountPreparer`. - The names it generates by default include the fully qualified test name, - and so tend to be longer than is allowed for storage accounts. - You'll probably always need to use `name_prefix` with `StorageAccountPreparer`. -3. We want to ensure that the group retrieved by `get_properties` - has a `kind` of `BlobStorage`. - We create a `FakeStorageAccount` object with that attribute - and pass it to `StorageAccountPreparer`, - and also pass the `kind` keyword argument to `StorageAccountPreparer` - so that it will be passed through when a storage account is prepared for real. -4. Similarly to how a resource group parameter is added by `ResourceGroupPreparer`, - `StorageAccountPreparer` passes the model object for the created storage account - as the `storage_account` parameter, and that parameter's name can be customized. - `StorageAccountPreparer` also creates an account access key - and passes it into the test method through a parameter whose name is formed - by appending `_key` to the name of the parameter for the account itself. +1. Here, we want to test creation of a media service, which requires a storage account. We want to use a preparer for this, but creation of a storage account itself needs a resource group. So we need both a `ResourceGroupPreparer` and a `StorageAccountPreparer`, in that order. +2. Both preparers are customized. We pass a `parameter_name` keyword argument of `group` to `ResourceGroupPreparer`, and as a result the resource group is passed into the test method through the `group` parameter (rather than the default `resource_group`). Then, because `StorageAccountPreparer` needs a resource group, we need to let it know about the modified parameter name. We do so with the `resource_group_parameter_name` argument. Finally, we pass a `name_prefix` to `StorageAccountPreparer`. The names it generates by default include the fully qualified test name, and so tend to be longer than is allowed for storage accounts. You'll probably always need to use `name_prefix` with `StorageAccountPreparer`. +3. We want to ensure that the group retrieved by `get_properties` has a `kind` of `BlobStorage`. We create a `FakeStorageAccount` object with that attribute and pass it to `StorageAccountPreparer`, and also pass the `kind` keyword argument to `StorageAccountPreparer` so that it will be passed through when a storage account is prepared for real. +4. Similarly to how a resource group parameter is added by `ResourceGroupPreparer`, `StorageAccountPreparer` passes the model object for the created storage account as the `storage_account` parameter, and that parameter's name can be customized. 
`StorageAccountPreparer` also creates an account access key and passes it into the test method through a parameter whose name is formed by appending `_key` to the name of the parameter for the account itself. ### Example 4: Different endpoint than public Azure (China, Dogfood, etc.) @@ -424,3 +309,13 @@ class ExampleSqlServerTestCase(AzureMgmtTestCase): self.assertEqual(server.name, test_server_name) ``` + +[arm_apis]: https://docs.microsoft.com/rest/api/resources/ +[azure_devtools]: https://pypi.org/project/azure-devtools/ +[azure_portal]: https://portal.azure.com/ +[decorators]: https://www.python.org/dev/peps/pep-0318/ +[dev_setup]: https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/dev_setup.md +[devtools_testutils]: https://github.com/Azure/azure-sdk-for-python/tree/master/tools/azure-sdk-tools/devtools_testutils +[mgmt_settings_fake]: https://github.com/Azure/azure-sdk-for-python/blob/master/tools/azure-sdk-tools/devtools_testutils/mgmt_settings_fake.py +[pytest]: https://docs.pytest.org/en/latest/ +[vcrpy]: https://pypi.python.org/pypi/vcrpy \ No newline at end of file diff --git a/doc/dev/packaging.md b/doc/dev/packaging.md index 642d1ef6eacd..861840f32656 100644 --- a/doc/dev/packaging.md +++ b/doc/dev/packaging.md @@ -1,25 +1,25 @@ # Azure packaging -This article describes how to declare setup.py and all packaging information for packages inside the "azure" namespace +This article describes how to declare setup.py and all packaging information for packages inside the `azure` namespace Namespace packaging is complicated in Python, here's a few reading if you still doubt it: - https://packaging.python.org/guides/packaging-namespace-packages/ - https://www.python.org/dev/peps/pep-0420/ - https://github.com/pypa/sample-namespace-packages -This articles describes the recommendation on how to do it if you want to release a package inside the "azure" namespace. Being inside the "azure" namespace meaning you have a service "myservice" that you want to import using: +This article describes the recommendation on how to define namespace packaging to release a package inside the `azure` namespace. Being inside the `azure` namespace meaning you have a service `myservice` that you want to import using: ```python import azure.myservice ``` -Notes: +Note: - This article is not about setup.py or setup.cfg or the right way to *write* the packaging, it's about what instructions you should use to achieve this. If you are fluent in setuptools, and prefer to write the suggestions in setup.cfg and not in setup.py, this is not a concern. # What are the constraints? We want to build sdist and wheels in order to follow the following constraints: -- Solution should work with *decent* versions of pip and setuptools (not the very latest only, but not archaeology either) -- Wheels must work with Python 2.7 and 3.4+ +- Solution should work with *recent* versions of pip and setuptools (not the very latest only, but not archaeology either) +- Wheels must work with Python 2.7 and 3.6+ - easy-install scenario is a plus, but cannot be considered critical anymore - mixed dev installation and PyPI installation should be explicitly addressed @@ -39,12 +39,14 @@ Your MANIFEST.in must include the following line `include azure/__init__.py`. Example: ```shell -include *.rst +include *.md include azure/__init__.py +recursive-include tests *.py +recursive-include samples *.py *.md ``` In your setup.py: -The "packages" section MUST EXCLUDE the "azure" package. 
Example: +The "packages" section MUST EXCLUDE the `azure` package. Example: ```python packages=find_packages(exclude=[ 'tests', @@ -53,11 +55,12 @@ The "packages" section MUST EXCLUDE the "azure" package. Example: ]), ``` -The "extras_requires" section MUST include a conditional dependency on "azure-nspkg" for Python 2. Example: +The "extras_requires" section MUST include a conditional dependency on "azure-nspkg" for Python 2. There is also a conditional dependency on "typing" for Python 3.5 because of the type-hinting for Python 3.5 and above. Example: ```python extras_require={ ":python_version<'3.0'": ['azure-nspkg'], + ":python_version<'3.5'": ['typing'], } ``` @@ -108,15 +111,16 @@ setup( author_email='azpysdkhelp@microsoft.com', url='https://github.com/Azure/azure-sdk-for-python', classifiers=[ - 'Development Status :: 5 - Production/Stable', + 'Development Status :: 4 - Beta', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', 'License :: OSI Approved :: MIT License', ], zip_safe=False, @@ -132,6 +136,7 @@ setup( ], extras_require={ ":python_version<'3.0'": ['azure-nspkg'], + ":python_version<'3.5'": ['typing'], } ) ``` @@ -142,4 +147,4 @@ This syntax works with setuptools >= 17.1 and pip >= 6.0, which is considered en - wheels must NOT contain a `azure/__init__.py` file (you can open it with a zip util to check) - wheels installs `azure-nskpg` ONLY on Python 2. -- sdist must contains a `azure/__init__.py` file that declares the "azure" as a namespace package using the `pkgutil` syntax +- sdist must contain a `azure/__init__.py` file that declares `azure` as a namespace package using the `pkgutil` syntax diff --git a/doc/dev/perfstress_tests.md b/doc/dev/perfstress_tests.md new file mode 100644 index 000000000000..3b68426f62aa --- /dev/null +++ b/doc/dev/perfstress_tests.md @@ -0,0 +1,355 @@ +# Table of Contents +1. [The perfstress framework](#the-perfstress-framework) + - [The PerfStressTest base](#the-perfstresstest-base) + - [Default command options](#default-command-options) +2. [Adding performance tests to an SDK](#adding-performance-tests-to-an-sdk) + - [Writing a test](#writing-a-test) + - [Adding legacy T1 tests](#adding-legacy-t1-tests) +3. [Running the tests](#running-the-tests) +4. [Readme](#readme) + +# The perfstress framework + +The perfstress framework has been added to azure-devtools module. The code can be found [here](https://github.com/Azure/azure-sdk-for-python/tree/master/tools/azure-devtools/src/azure_devtools/perfstress_tests). +The framework provides a baseclass to inherit from when writing tests, as well as some tools and utilities to facilitate running +the tests. To start using the framework, make sure that `azure-devtools` is included in the `dev_requirements.txt` for the SDK: +``` +-e ../../../tools/azure-devtools +``` +The perfstress framework offers the following: +- The `perfstress` commandline tool. +- The `PerfStressTest` baseclass. +- Stream utilities for uploading/downloading without storing in memory: `RandomStream`, `AsyncRandomStream`, `WriteStream`. +- A `get_random_bytes` utility for returning randomly generated data. 
+- A series of "system tests" to test the perfstress framework along with the performance of the raw transport layers (requests, aiohttp, etc). + +## The PerfStressTest base +The `PerfStressTest` base class is what will be used for all perf test implementations. It provides the following API: +```python +class PerfStressTest: + args = {} # Command line arguments + + def __init__(self, arguments): + # The command line args can be accessed on construction. + + async def global_setup(self): + # Can be optionally defined. Only run once, regardless of parallelism. + + async def global_cleanup(self): + # Can be optionally defined. Only run once, regardless of parallelism. + + async def setup(self): + # Can be optionally defined. Run once per test instance, after global_setup. + + async def cleanup(self): + # Can be optionally defined. Run once per test instance, before global_cleanup. + + async def close(self): + # Can be optionally defined. Run once per test instance, after cleanup and global_cleanup. + + def run_sync(self): + # Must be implemented. This will be the perf test to be run synchronously. + + async def run_async(self): + # Must be implemented. This will be the perf test to be run asynchronously. + # If writing a test for a T1 legacy SDK with no async, implement this method and raise an exception. + + @staticmethod + def add_arguments(parser): + # Override this method to add test-specific argparser args to the class. + # These are accessible in __init__() and the self.args property. + + @staticmethod + def get_from_env(variable): + # Get the value of an env var. If empty or not found, a ValueError will be raised. +``` +## Default command options +The framework has a series of common command line options built in: +- `--duration=10` Number of seconds to run as many operations (the "run" function) as possible. Default is 10. +- `--iterations=1` Number of test iterations to run. Default is 1. +- `--parallel=1` Number of tests to run in parallel. Default is 1. +- `--warm-up=5` Number of seconds to spend warming up the connection before measuring begins. Default is 5. +- `--sync` Whether to run the tests in sync or async. Default is False (async). +- `--no-cleanup` Whether to keep newly created resources after test run. Default is False (resources will be deleted). + +# Adding performance tests to an SDK +The performance tests will be in a submodule called `perfstress_tests` within the `tests` directory in an SDK project. +For example: +``` +sdk/storage/azure-storage-blob/tests/perfstress_tests +``` +This `perfstress_tests` directory is a module, and so must contain an `__init__.py` file. This can be empty. + +## Writing a test +To add a test, import and inherit from `PerfStressTest` and populate the functions as needed. +The name of the class will be the name of the perf test, and is what will be passed into the command line to execute that test. 
+```python
+import asyncio
+
+from azure_devtools.perfstress_tests import PerfStressTest
+
+from azure.storage.blob import BlobServiceClient as SyncBlobServiceClient
+from azure.storage.blob.aio import BlobServiceClient as AsyncBlobServiceClient
+
+
+class ListContainersTest(PerfStressTest):
+
+    def __init__(self, arguments):
+        super().__init__(arguments)
+
+        # Auth configuration
+        connection_string = self.get_from_env("AZURE_STORAGE_CONNECTION_STRING")
+
+        # Create clients
+        self.service_client = SyncBlobServiceClient.from_connection_string(conn_str=connection_string)
+        self.async_service_client = AsyncBlobServiceClient.from_connection_string(conn_str=connection_string)
+
+    async def global_setup(self):
+        """The global setup is run only once.
+
+        Use this for any setup that can be reused multiple times by all test instances.
+        """
+        await super().global_setup()
+        containers = [self.async_service_client.create_container(str(i)) for i in range(self.args.num_containers)]
+        await asyncio.wait(containers)
+
+    async def global_cleanup(self):
+        """The global cleanup is run only once.
+
+        Use this to clean up any resources created in setup.
+        """
+        async for container in self.async_service_client.list_containers():
+            await self.async_service_client.delete_container(container)
+        await super().global_cleanup()
+
+    async def close(self):
+        """This is run after cleanup.
+
+        Use this to close any open handles or clients.
+        """
+        await self.async_service_client.close()
+        await super().close()
+
+    def run_sync(self):
+        """The synchronous perf test.
+
+        Try to keep this minimal and focused, using only a single client API.
+        Avoid putting any ancillary logic here (e.g. generating UUIDs); put that in the setup/init instead
+        so that we're only measuring the client API call.
+        """
+        for _ in self.service_client.list_containers():
+            pass
+
+    async def run_async(self):
+        """The asynchronous perf test.
+
+        Try to keep this minimal and focused, using only a single client API.
+        Avoid putting any ancillary logic here (e.g. generating UUIDs); put that in the setup/init instead
+        so that we're only measuring the client API call.
+        """
+        async for _ in self.async_service_client.list_containers():
+            pass
+
+    @staticmethod
+    def add_arguments(parser):
+        super(ListContainersTest, ListContainersTest).add_arguments(parser)
+        parser.add_argument('--num-containers', nargs='?', type=int, help='Number of containers to list. Defaults to 100', default=100)
+```
+### Common test base
+If you're writing a suite of tests for an SDK that all make use of common arguments or logic, adding one or more test base classes of your own can be helpful. These can also be used to navigate different layers of a client hierarchy.
+Here is an example Storage test base class, to be used for the Blob upload and download tests described below:
+```python
+from azure_devtools.perfstress_tests import PerfStressTest
+
+from azure.storage.blob import BlobServiceClient as SyncBlobServiceClient
+from azure.storage.blob.aio import BlobServiceClient as AsyncBlobServiceClient
+
+class _StorageStreamTestBase(PerfStressTest):
+
+    def __init__(self, arguments):
+        super().__init__(arguments)
+
+        # Any common attributes
+        self.container_name = 'streamperftests'
+
+        # Auth configuration
+        connection_string = self.get_from_env("AZURE_STORAGE_CONNECTION_STRING")
+
+        # Create clients
+        self.service_client = SyncBlobServiceClient.from_connection_string(conn_str=connection_string)
+        self.async_service_client = AsyncBlobServiceClient.from_connection_string(conn_str=connection_string)
+
+    async def global_setup(self):
+        await super().global_setup()
+
+        # Any common setup used by all the streaming tests
+        await self.async_service_client.create_container(self.container_name)
+
+    async def global_cleanup(self):
+        # Any common cleanup used by all the streaming tests
+        await self.async_service_client.delete_container(self.container_name)
+        await super().global_cleanup()
+
+    async def close(self):
+        await self.async_service_client.close()
+        await super().close()
+
+    @staticmethod
+    def add_arguments(parser):
+        super(_StorageStreamTestBase, _StorageStreamTestBase).add_arguments(parser)
+
+        # Add any common arguments for the streaming test cases
+        parser.add_argument('--max-concurrency', nargs='?', type=int, help='Number of concurrent threads to upload/download the data. Defaults to 1.', default=1)
+        parser.add_argument('--size', nargs='?', type=int, help='Size in bytes for the amount of data to be streamed. Defaults to 1024 bytes', default=1024)
+```
+
+### Testing with streams
+If you need to test any kind of streaming behaviour (e.g. upload or download) then use the provided read and write file-like implementations. These will generate random data, while not storing more than the current chunk in memory. This prevents memory errors when running with large payloads at high parallelism.
+#### Example upload stream test:
+```python
+from azure_devtools.perfstress_tests import RandomStream, get_random_bytes
+from azure_devtools.perfstress_tests import AsyncRandomStream
+
+from ._test_base import _StorageStreamTestBase
+
+
+class UploadTest(_StorageStreamTestBase):
+    def __init__(self, arguments):
+        super().__init__(arguments)
+
+        # Setup service clients
+        blob_name = "uploadtest"
+        self.blob_client = self.service_client.get_blob_client(self.container_name, blob_name)
+        self.async_blob_client = self.async_service_client.get_blob_client(self.container_name, blob_name)
+
+        # Setup readable file-like upload data sources, using the configurable 'size' argument
+        self.upload_stream = RandomStream(self.args.size)
+        self.upload_stream_async = AsyncRandomStream(self.args.size)
+
+    def run_sync(self):
+        # The stream needs to be reset at the start of each run.
+        # This sets the position index back to 0 with minimal overhead.
+        self.upload_stream.reset()
+
+        # Test the upload API
+        self.blob_client.upload_blob(
+            self.upload_stream,
+            length=self.args.size,
+            overwrite=True,
+            max_concurrency=self.args.max_concurrency)
+
+    async def run_async(self):
+        # The stream needs to be reset at the start of each run.
+        # This sets the position index back to 0 with minimal overhead.
+        self.upload_stream_async.reset()
+
+        # Test the upload API
+        await self.async_blob_client.upload_blob(
+            self.upload_stream_async,
+            length=self.args.size,
+            overwrite=True,
+            max_concurrency=self.args.max_concurrency)
+```
+#### Example download stream test:
+```python
+from azure_devtools.perfstress_tests import get_random_bytes, WriteStream
+
+from ._test_base import _StorageStreamTestBase
+
+
+class DownloadTest(_StorageStreamTestBase):
+    def __init__(self, arguments):
+        super().__init__(arguments)
+
+        # Setup service clients
+        blob_name = "downloadtest"
+        self.blob_client = self.service_client.get_blob_client(self.container_name, blob_name)
+        self.async_blob_client = self.async_service_client.get_blob_client(self.container_name, blob_name)
+
+        self.download_stream = WriteStream()
+
+    async def global_setup(self):
+        await super().global_setup()
+
+        # Setup the test by uploading data that can be reused by all test instances.
+        data = get_random_bytes(self.args.size)
+        await self.async_blob_client.upload_blob(data)
+
+    def run_sync(self):
+        # The stream needs to be reset at the start of each run.
+        # This sets the position index back to 0 with minimal overhead.
+        self.download_stream.reset()
+
+        # Test the API
+        stream = self.blob_client.download_blob(max_concurrency=self.args.max_concurrency)
+        stream.readinto(self.download_stream)
+
+    async def run_async(self):
+        # The stream needs to be reset at the start of each run.
+        # This sets the position index back to 0 with minimal overhead.
+        self.download_stream.reset()
+
+        # Test the API
+        stream = await self.async_blob_client.download_blob(max_concurrency=self.args.max_concurrency)
+        await stream.readinto(self.download_stream)
+```
+## Adding legacy T1 tests
+To compare performance against T1 libraries, you can add tests for a legacy SDK. To do this, add a submodule into the `perfstress_tests` module called `T1_legacy_tests` (and add an empty `__init__.py`).
+To configure the exact T1 SDK you wish to compare perf against, add a `t1_test_requirements.txt` file to install any package requirements. Note that this will likely be incompatible with the T2 SDK testing environment, and running the legacy tests will probably need to be from a separate virtual environment (see the [Running the tests](#running-the-tests) section below).
+Writing the tests themselves will be done exactly the same way - however it's recommended to prefix the test names with `Legacy` (or similar) to avoid confusion.
+```
+perfstress_tests
+│   README.md
+|   __init__.py
+│   upload.py
+|   download.py
+│
+└───T1_legacy_tests
+|   |   __init__.py
+│   │   legacy_upload.py
+│   │   legacy_download.py
+|   |   t1_test_requirements.txt
+```
+
+# Running the tests
+In order to run the performance tests, the `azure-devtools` package must be installed. This is done as part of the `dev_requirements`.
+Start by creating a new virtual environment for your perf tests. This will need to be a Python 3 environment, preferably >=3.7.
+Note that tests for T1 and T2 SDKs usually cannot be run from the same environment, and will need to be set up separately.
+
+### Setup for test resources
+Depending on the tests, some resource configuration (e.g. environment variables) may need to be done first. This should be documented in the perfstress_tests readme file.
+Example from storage:
+```
+AZURE_STORAGE_CONNECTION_STRING=
+```
+### Setup for perf test runs
+
+```cmd
+(env) ~/azure-storage-file-share> pip install -r dev_requirements.txt
+(env) ~/azure-storage-file-share> pip install -e .
+``` +### Setup for T1 legacy perf test runs + +```cmd +(legacy-env) ~/azure-storage-file-share> pip install -r dev_requirements.txt +(legacy-env) ~/azure-storage-file-share> pip install tests/perfstress_tests/T1_legacy_tests/t1_test_requirements.txt +``` +### Test commands + +When `azure-devtools` is installed, you will have access to the `perfstress` command line tool, which will scan the current module for runable perf tests. Only a specific test can be run at a time (i.e. there is no "run all" feature). + +```cmd +(env) ~/azure-storage-file-share> cd tests +(env) ~/azure-storage-file-share/tests> perfstress +``` +Using the `perfstress` command alone will list the available perf tests found. Note that the available tests discovered will vary depending on whether your environment is configured for the T1 or T2 SDK. + +### Example test run command +```cmd +(env) ~/azure-storage-file-share/tests> perfstress UploadTest --parallel=2 --size=10240 +``` + +# Readme + +Please add a `README.md` to the perfstress_tests directory so that others know how to setup and run the perf tests, along with a description of the available tests and any support command line options. README files in a `tests/perfstress_tests` directory should already be filtered from CI validation for SDK readmes. +Some examples can be found here: +- [Azure Storage Blob](https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/storage/azure-storage-blob/tests/perfstress_tests/README.md) +- [Azure Service Bus](https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/servicebus/azure-servicebus/tests/perf_tests/README.md) \ No newline at end of file diff --git a/doc/dev/release.md b/doc/dev/release.md index eb61edcef111..c65dbb72e4b7 100644 --- a/doc/dev/release.md +++ b/doc/dev/release.md @@ -16,11 +16,11 @@ Python packages are uploaded to [PyPI](https://pypi.org/). Once you've uploaded ### Production - Deploy with Azure Dev Ops -To avoid "accidental" pushes to our target repositories, [approval](https://docs.microsoft.com/en-us/azure/devops/pipelines/release/approvals/approvals?view=azure-devops) will be requested directly prior to the final PyPI publish. Reference this [wiki page](https://aka.ms/python-approval-groups) and click on `Release to PyPI Approvers` to add yourself to the group for PyPI publishing. +To avoid "accidental" pushes to our target repositories, [approval](https://docs.microsoft.com/azure/devops/pipelines/release/approvals/approvals?view=azure-devops) will be requested directly prior to the final PyPI publish. Reference this [wiki page](https://aka.ms/python-approval-groups) and click on `Release to PyPI Approvers` to add yourself to the group for PyPI publishing. Instead of a single central pipeline, the python SDK has moved to `service directory` associated build pipelines. These are driven by yml templates at the root of each service folder. [Example for storage service folder.](https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/storage/ci.yml#L44) -As an aside, please note that the preview feature `multi-stage pipelines` must be enabled to properly interact with unified pipelines. If you aren't aware, find out how to enable by visiting [this link.](https://docs.microsoft.com/en-us/azure/devops/project/navigation/preview-features?view=azure-devops) +As an aside, please note that the preview feature `multi-stage pipelines` must be enabled to properly interact with unified pipelines. 
If you aren't aware, find out how to enable by visiting [this link.](https://docs.microsoft.com/azure/devops/project/navigation/preview-features?view=azure-devops) #### Releasing Through Unified Pipelines diff --git a/doc/dev/tests-advanced.md b/doc/dev/tests-advanced.md new file mode 100644 index 000000000000..909d768c8ada --- /dev/null +++ b/doc/dev/tests-advanced.md @@ -0,0 +1,186 @@ +# Setup Python Development Environment - Advanced +In this document we will provide additional information about the test environments: + +- [Test Mixin Classes](#test-mixin-classes) +- [Resource Preparers](#preparers) +- [Examples with Preparers](#examples-with-preparers) +- [mgmt_settings_real.py](#mgmt_settings_real-file) + +## Test Mixin Classes +Many of our test suites use a mixin class to reduce re-writing code in multiple test files. For example, in the Tables test suite there is a `_shared` directory containing two of these mixin classes, a [sync one](https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/tables/azure-data-tables/tests/_shared/testcase.py) and an [async version](https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/tables/azure-data-tables/tests/_shared/asynctestcase.py). These classes will often have ways to create connection strings from an account name and key, formulate the account url, configure logging, or validate service responses. In order for these mixin classes to be used by both the functional and unit tests they should inherit from `object`. For example: + +```python + +class TablesTestMixin(object): + def connection_string(self, account, key): + return "DefaultEndpointsProtocol=https;AccountName=" + account + ";AccountKey=" + str(key) + ";EndpointSuffix=core.windows.net" + + def account_url(self, account, endpoint_type): + """Return an url of storage account. + :param str storage_account: Storage account name + :param str storage_type: The Storage type part of the URL. Should be "table", or "cosmos", etc. + """ + try: + if endpoint_type == "table": + return account.primary_endpoints.table.rstrip("/") + if endpoint_type == "cosmos": + return "https://{}.table.cosmos.azure.com".format(account.name) + else: + raise ValueError("Unknown storage type {}".format(storage_type)) + except AttributeError: # Didn't find "primary_endpoints" + if endpoint_type == "table": + return 'https://{}.{}.core.windows.net'.format(account, endpoint_type) + if endpoint_type == "cosmos": + return "https://{}.table.cosmos.azure.com".format(account) + + def enable_logging(self): + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter(LOGGING_FORMAT)) + self.logger.handlers = [handler] + self.logger.setLevel(logging.INFO) + self.logger.propagate = True + self.logger.disabled = False +``` + +In action this class can be used in functional tests: + +```python +class TestTablesFunctional(AzureTestCase, TablesTestMixin): + ... + def test_with_mixin(self, account, key): + conn_str = self.connection_string(account, key) + client = TableClient.from_connection_string(conn_str) + client.create_table('first') + client.create_table('second') + tables = 0 + for table in client.list_tables(): + tables += 1 + + assert tables == 2 +``` + +Or can be used in a unit test: +```python +class TestTablesUnit(TablesTestMixin): + ... 
+    def test_valid_url(self):
+        account = "fake_tables_account"
+        credential = "fake_tables_account_key_0123456789"
+
+        url = self.account_url(account, "tables")
+        client = TableClient(account_url=url, credential=credential)
+
+        assert client is not None
+        assert client.account_url == "https://{}.tables.core.windows.net/".format(account)
+```
+
+
+## Preparers
+The Azure SDK team has created some in-house tools to make testing easier. These additional tools are located in the `devtools_testutils` package that was installed with your `dev_requirements.txt`. In this package are the preparers that are commonly used throughout the repository to test various resources. A preparer is a way to programmatically create fresh resources to run our tests against and then delete them after running a test suite. These help guarantee standardized behavior by starting each test group from a fresh resource and account.
+
+If this situation is a requirement for your tests, you can opt to create a new preparer for your service based on its management plane library. There are already a few preparers built into [devtools_testutils](https://github.com/Azure/azure-sdk-for-python/tree/master/tools/azure-sdk-tools/devtools_testutils). Most preparers start with the [`ResourceGroupPreparer`](https://github.com/Azure/azure-sdk-for-python/blob/master/tools/azure-sdk-tools/devtools_testutils/resource_testcase.py#L29-L99) to first create a resource group for your service.
+
+To build your own preparer you will need to use the management plane library to create a service and pass the credentials you need into your tests. The two important methods for a preparer are the `create_resource` and `remove_resource` methods. In the `create_resource` method you will use the management client to create the resource and return a dictionary of key-value pairs. The keys will be matched with the test method parameters and passed in as positional arguments to the test. The `remove_resource` method will clean up and remove the resource to prevent a backlog of unused resources in your subscription. A minimal sketch of this pattern is shown below.
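+
+The snippet below is only an illustrative sketch of that shape, not a preparer that exists in the repository: `MyServiceManagementClient`, its `widgets` operations, and the `widget` parameter name are placeholders for your own service, and it assumes the `AzureMgmtPreparer` base class from `devtools_testutils`.
+
+```python
+from collections import namedtuple
+
+from devtools_testutils import AzureMgmtPreparer
+from azure.mgmt.myservice import MyServiceManagementClient  # hypothetical management client
+
+# Stand-in object returned in playback mode, mirroring the FakeResource idea used by the built-in preparers.
+FakeResource = namedtuple("FakeResource", ["name", "id"])
+
+
+class WidgetPreparer(AzureMgmtPreparer):
+    def __init__(self, name_prefix="widget"):
+        super(WidgetPreparer, self).__init__(name_prefix, 24)
+
+    def create_resource(self, name, **kwargs):
+        if self.is_live:
+            self.client = self.create_mgmt_client(MyServiceManagementClient)
+            # Assumes a ResourceGroupPreparer is stacked above this preparer.
+            group = kwargs.get("resource_group")
+            widget = self.client.widgets.begin_create(group.name, name).result()  # hypothetical operation
+        else:
+            widget = FakeResource(name=name, id="")
+        # The keys of this dictionary are matched to the test method's parameter names.
+        return {"widget": widget}
+
+    def remove_resource(self, name, **kwargs):
+        if self.is_live:
+            group = kwargs.get("resource_group")
+            self.client.widgets.begin_delete(group.name, name).wait()  # hypothetical operation
+```
+Stacked below `@ResourceGroupPreparer()`, such a decorator would inject a `widget` argument into any test method that declares a parameter with that name.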
+For real implementations of each of these methods, check out these examples:
+
+| Preparer | `create_resource` | `remove_resource` |
+|-|-|-|
+| Resource Group | [link](https://github.com/Azure/azure-sdk-for-python/blob/master/tools/azure-sdk-tools/devtools_testutils/resource_testcase.py#L57-L85) | [link](https://github.com/Azure/azure-sdk-for-python/blob/master/tools/azure-sdk-tools/devtools_testutils/resource_testcase.py#L87-L99) |
+| Storage Account | [link](https://github.com/Azure/azure-sdk-for-python/blob/master/tools/azure-sdk-tools/devtools_testutils/storage_testcase.py#L53-L102) | [link](https://github.com/Azure/azure-sdk-for-python/blob/master/tools/azure-sdk-tools/devtools_testutils/storage_testcase.py#L104-L107) |
+| KeyVault | [link](https://github.com/Azure/azure-sdk-for-python/blob/master/tools/azure-sdk-tools/devtools_testutils/keyvault_preparer.py#L84-L131) | [link](https://github.com/Azure/azure-sdk-for-python/blob/master/tools/azure-sdk-tools/devtools_testutils/keyvault_preparer.py#L133-L138) |
+
+
+## Examples with Preparers
+
+### Example 2: Basic Preparer Usage with Storage
+
+```python
+import os
+import pytest
+
+from azure.data.tables import TableServiceClient
+from devtools_testutils import (
+    AzureTestCase,
+    ResourceGroupPreparer,
+    StorageAccountPreparer
+)
+
+class ExampleStorageTestCase(AzureTestCase):
+
+    @ResourceGroupPreparer()
+    @StorageAccountPreparer()
+    def test_create_table(self, resource_group, location, storage_account, storage_account_key):
+        account_url = self.account_url(storage_account, "table")
+        client = self.create_client_from_credential(TableServiceClient, storage_account_key, account_url=account_url)
+
+        valid_table_name = "validtablename"
+        table = client.create_table(valid_table_name)
+
+        assert valid_table_name == table.table_name
+```
+
+This test uses preparers to create resources, then creates a table, and finally verifies the name is correct.
+
+Notes:
+1. This test aims to create a new Table, which requires a storage account, which in turn requires a resource group. The first decorator (`@ResourceGroupPreparer()`) creates a new resource group and passes the parameters of this resource group into the `@StorageAccountPreparer()`, which creates the storage account. The parameters from the storage account creation are passed into the signature of `test_create_table`.
+2. The `create_client_from_credential` helper is used again, but this time with `storage_account_key` instead of a credential obtained from the `self.get_credential` method shown in the previous section. The storage account preparer returns the key for the account, which is a valid credential.
+ + +### Example 3: Cached Preparer Usage +```python +import os +import pytest + +from azure.core.exceptions import ResourceExistsError +from azure.data.tables import TableServiceClient +from devtools_testutils import ( + AzureTestCase, + CachedResourceGroupPreparer, + CachedStorageAccountPreparer +) + +class ExampleStorageTestCase(AzureTestCase): + + @CachedResourceGroupPreparer(name_prefix="storagetest") + @CachedStorageAcountPreparer(name_prefix="storagetest") + def test_create_table(self, resource_group, location, storage_account, storage_account_key): + account_url = self.account_url(storage_account, "table") + client = self.create_client_from_credential(TableServiceClient, storage_account_key, account_url=account_url) + + valid_table_name = "validtablename" + table = client.create_table(valid_table_name) + + assert valid_table_name == table.table_name + + @CachedResourceGroupPreparer(name_prefix="storagetest") + @CachedStorageAcountPreparer(name_prefix="storagetest") + def test_create_table_if_exists (self, resource_group, location, storage_account, storage_account_key): + account_url = self.account_url(storage_account, "table") + client = self.create_client_from_credential(TableServiceClient, storage_account_key, account_url=account_url) + + valid_table_name = "validtablename" + with pytest.raises(ResourceExistsError): + table = client.create_table(valid_table_name) +``` + +The first test is the same as above, the second test tries to create a table that already exists and asserts that the correct type of error is raised in response. These tests use cached preparers unlike the previous example. + +Notes: +1. The cached preparers here will first look to see if an existing resource group or storage account exists with the given parameters, in this case the `name_prefix`. For more information on what parameters differentiate a new resource group or storage account look for the `self.set_cache()` method in the preparer source code [here](https://github.com/Azure/azure-sdk-for-python/blob/master/tools/azure-sdk-tools/devtools_testutils/storage_testcase.py#L49). The advantage to using a cached preparer is the time saver to re-using the same resource instead of creating a new resource for each test. However, this can increase the possibility that you have to be more exact about cleaning up the entities created in between test runs. + +## mgmt_settings_real file + +A `mgmt_settings_real.py` can be used in place of a `.env` file by copying `sdk/tools/azure-sdk-tools/devtools_testutils/mgmt_settings_fake.py` to `sdk/tools/azure-sdk-tools/devtools_testutils/mgmt_settings_real.py` and providing real credentials to it. The following changes need to be made to the `mgmt_settings_real.py` file: + +1. Change the value of the `SUBSCRIPTION_ID` variable to your organizations subscription ID, which can be found in the "Overview" section of the "Subscriptions" blade in the [Azure portal](https://portal.azure.com/). +2. Define `TENANT_ID`, `CLIENT_ID`, and `CLIENT_SECRET`, which are available after creating a Service Principal or can be retrieved from the Azure Portal after creating a Service Principal. Check out the [Azure docs](https://docs.microsoft.com/cli/azure/ad/sp?view=azure-cli-latest#az_ad_sp_create_for_rbac) to create a Service Principal with a simple one line command to create one. The recommended practice is to include your alias or name in the Service Principal name. +3. 
Change the [`get_azure_core_credentials(**kwargs):`](https://github.com/Azure/azure-sdk-for-python/blob/master/tools/azure-sdk-tools/devtools_testutils/mgmt_settings_fake.py#L39-L53) function in the `mgmt_settings_real.py` file to construct and return a `ClientSecretCredential` object. Pass in the `CLIENT_ID`, `CLIENT_SECRET`, and `TENANT_ID` values to the `ClientSecretCredential` object. This method should look like this: +```python +def get_azure_core_credentials(**kwargs): + from azure.identity import ClientSecretCredential + import os + return ClientSecretCredential( + client_id = CLIENT_ID, + client_secret = CLIENT_SECRET, + tenant_id = TENANT_ID + ) +``` \ No newline at end of file diff --git a/doc/dev/tests.md b/doc/dev/tests.md new file mode 100644 index 000000000000..935484fc60c7 --- /dev/null +++ b/doc/dev/tests.md @@ -0,0 +1,345 @@ +# Setup Python Development Environment +In this document we will provide the introduction to the testing framework by: + +- [Setting up your development environment](#setup-the-development-environment) +- [Integrating with pytest](#integrate-with-the-pytest-test-framework) +- [Using Tox](#tox) +- [The `devtools_testutils` package](#devtools_testutils-package) +- [Writing New Tests](#writing-new-tests) +- [Define our credentials and settings](#define-credentials) +- [Create live test resources](#create-live-test-resources) +- [Write our test](#writing-your-test) +- [An example test](#an-example-test) +- [Run and record our tests](#run-and-record-the-test) + -[Purging secrets from recording files](#purging-secrets) + +## Setup your development environment + +The Azure SDK Python team creates libraries that are compatible with Python 2.7 and 3.5 and up. We will set up working Python virtual environments for Python 2.7, 3.5, and 3.9. It is recommended to do your development work in Python3, however it is helpful to have virtual environments for other versions to make debugging PRs easier locally. + +* Python 3.9: Use the [python website](https://www.python.org/downloads/) or the one-click experience from the [Windows store](https://www.microsoft.com/p/python-39/9p7qfqmjrfp7) (Windows only). +* Python 3.5: Use the [python website](https://www.python.org/downloads/release/python-3510/) +* Python 2.7: Use the [python website](https://www.python.org/downloads/release/python-2718/) +```cmd +C:\Users> python -m venv env +C:\Users> env\scripts\activate # PowerShell only +C:\Users> source env\bin\activate # Linux shell (Bash, ZSH, etc.) only +C:\Users> env\scripts\activate.bat # Windows CMD only +(env)C:\Users> +``` +To create virtual environment for different versions of Python use the `-p` flag to pass the specific Python executable you want to use +```cmd +C:\Users> python -m venv -p py35_venv +C:\Users> python -m venv -p py27_venv +``` + +### SDK root directory + +In the root directory of our SDK, a number of mandatory files have been added. When creating your own SDK, these files can be copied from the [`sdk/template`](https://github.com/Azure/azure-sdk-for-python/tree/master/sdk/template) project, and modified to your needs. + +- README.md. This is the description and guidance for customers or your SDK. Please see the guide on writing a README to make sure you have the complete [content requirements and formatting](https://review.docs.microsoft.com/help/contribute-ref/contribute-ref-how-to-document-sdk?branch=master#readme). +- CHANGELOG.md. This is where you will add the summary of changes for each new release. 
Please see [the guidance](https://azure.github.io/azure-sdk/policies_releases.html#changelog-guidance) for correct formatting. +- setup.py. This is the 'installer' for your Python SDK. Please see [the guide on Python packaging][packaging] for details on customizing this for a specific package. +- setup.cfg. This is an artifact used in building the Python package. Please see [the guide on Python packaging][packaging] for details. +- MANIFEST.in. This is an artifact used in building the Python package. Please see [the guide on Python packaging][packaging] for details. +- dev_requirements.txt. This is for developers, and lists the packages required for running the tests and samples. See the dependency installation section below. +- sdk_packaging.toml. This configuration is used by the packaging pipeline and no further modifications should be required. + + +### Dependency installation + +Our SDK will have dependencies on other packages in the Azure Python SDK ecosystem. In order to run our tests and samples, we will need to setup our virtual environment to be able to find these external dependencies within the repo. We use the `dev_requirements.txt` to list these dependencies as relative paths (along with any other external packages that should be installed from PyPI). +The libraries currently listed in this file include `azure-core` and `azure-identity` as well as some internal tooling packages and our testing framework libraries. +These dependencies can be installed with the following command: + +```cmd +(env)azure-sdk-for-python\sdk\my-directory\my-library> pip install -r dev_requirements.txt +``` +Next we will install our Python SDK to the virtual environment as an 'editable install' - this means that as we work on the implementation, we will be able to run the package as it develops, as opposed to having to periodically rebuild and reinstall. +```cmd +(env)azure-sdk-for-python\sdk\my-directory\my-library> pip install -e . +``` + +We should now be able to open an interactive Python terminal, and execute code from our new SDK +```cmd +(env)azure-sdk-for-python\sdk\my-directory\my-library> python + +>>> import azure.my_library +>>> print(azure.my_library.__version__) +0.0.1 +``` + +### Open code in IDE + +Open the directory for your library in your preferred editor, for example VSCode. +```cmd +(env)azure-sdk-for-python\sdk\my-directory\my-library> code . +``` + +## Integrate with the pytest Test Framework +As a quick background, the Azure SDK uses the [pytest](https://docs.pytest.org/en/latest/) test runner to support creating unit and functional tests for Track 2 Azure libraries. To intall `pytest` run `pip install pytest` from your virtual environment, you can confirm the installation was successful by running `pytest -V`. The commands will run all files of the form `test_*.py` or `*_test.py` in the provided directory and its subdirectories, for more information check out the [docs](https://docs.pytest.org/en/stable/getting-started.html#run-multiple-tests). 
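+
+As a purely illustrative sketch (the file and function names below are placeholders, not tests from this repository), a file following that naming convention is collected automatically:
+
+```python
+# tests/test_example.py -- discovered because the file name matches test_*.py
+# and the function name starts with test_.
+def test_addition():
+    assert 1 + 1 == 2
+```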
+ +With the pytest test suite you can provide directories or specific tests to run rather than running the entire test suite: +```cmd +azure-sdk-for-python\sdk\my-directory\my-library> pytest +azure-sdk-for-python\sdk\my-directory\my-library> pytest +``` + +If your tests are broken up into multiple folders for organization, you can run specific folders: +```cmd +azure-sdk-for-python\sdk\my-directory\my-library> pytest .\tests\async_tests\ +azure-sdk-for-python\sdk\my-directory\my-library> pytest .\tests\async_tests\ +``` + +In addition you can provide keywords to run specific tests within the suite or within a specific file +```cmd +azure-sdk-for-python\sdk\my-directory\my-library> pytest -k +azure-sdk-for-python\sdk\my-directory\my-library> pytest -k +``` + +If you have print statements in your tests for debugging you can add the `-s` flag to send those print statements to standard output: +```cmd +azure-sdk-for-python\sdk\my-directory\my-library> pytest sdk/storage/azure-mgmt-storage/ -s +``` + +## Tox +The Python SDK uses the [tox project](https://tox.readthedocs.io/en/latest/) to automate releases, run tests, run linters, and build our documentation. The `tox.ini` file is located at `azure-sdk-for-python/eng/tox/tox.ini` for reference. You do not need to make any changes to the tox file for tox to work with your project. Tox will create a directory (`.tox`) in the head of your branch. The first time you run tox commands it may take several moments, but subsequent runs will be quicker. To install tox run the following command from within your virtual environment. +`(env) > pip install tox tox-monorepo`. + +To run a tox command from your directory use the following commands: +```cmd +azure-sdk-for-python\sdk\my-directory\my-library> tox -c ../../../eng/tox/tox.ini -e sphinx +azure-sdk-for-python\sdk\my-directory\my-library> tox -c ../../../eng/tox/tox.ini -e lint +azure-sdk-for-python\sdk\my-directory\my-library> tox -c ../../../eng/tox/tox.ini -e mypy +azure-sdk-for-python\sdk\my-directory\my-library> tox -c ../../../eng/tox/tox.ini -e whl +azure-sdk-for-python\sdk\my-directory\my-library> tox -c ../../../eng/tox/tox.ini -e sdist +azure-sdk-for-python\sdk\my_directory\my_library> tox -c ../../../eng/tox/tox.ini -e samples +azure-sdk-for-python\sdk\my_directory\my_library> tox -c ../../../eng/tox/tox.ini -e apistub +``` +A quick description of the five commands above: +* sphinx: documentation generation using the inline comments written in our code +* lint: runs pylint to make sure our code adheres to the style guidance +* mypy: runs the mypy static type checker for Python to make sure that our types are valid. In order to opt-in to mypy checks, add your package name to [this](https://github.com/Azure/azure-sdk-for-python/blob/master/eng/tox/mypy_hard_failure_packages.py) list of packages. +* whl: creates a whl package for installing our package +* sdist: creates a zipped distribution of our files that the end user could install with pip +* samples: runs all of the samples in the `samples` directory and verifies they are working correctly +* apistub: runs the [apistubgenerator](https://github.com/Azure/azure-sdk-tools/tree/master/packages/python-packages/api-stub-generator) tool on your code + +## `devtools_testutils` Package +The Azure SDK team has created some in house tools to help with easier testing. These additional tools are located in the `devtools_testutils` package that was installed with your `dev_requirements.txt`. 
In this package is the [`AzureTestCase`](https://github.com/Azure/azure-sdk-for-python/blob/master/tools/azure-sdk-tools/devtools_testutils/azure_testcase.py#L99-L350) object which every test case object should inherit from. This management object takes care of creating and scrubbing recordings to make sure secrets are not added to the recordings files (and subsequently to the git history) and authenticating clients for test methods. + +## Writing New Tests +SDK tests are based on the `scenario_tests` subpackage located in [`azure-sdk-for-python/tools/azure-devtools/src/azure_devtools`](https://pypi.org/project/azure-devtools/). `scenario_tests` is a general, mostly abstracted framework which provides several useful features for writing SDK tests, ie: +* HTTP interaction recording and playback using [vcrpy](https://pypi.python.org/pypi/vcrpy) +* Creation and cleanup of helper resources, such as resource groups, storage accounts, etc. which can be used in order to test services +* Processors for modifying requests and responses when writing or reading recordings (for example, to to avoid recording credential information) +* Patches for overriding functions and methods that don't work well with testing frameworks (such as long-running operations) + +Code in the [`azure-sdk-tools/devtools_testutils`](https://github.com/Azure/azure-sdk-for-python/tree/master/tools/azure-sdk-tools/devtools_testutils) directory provides concrete implementations of the features provided in `scenario_tests` that are oriented around use in SDK testing and that you can use directly in your unit tests. + +## Define credentials +When you run tests in playback mode, they use a fake credentials file, located at [`tools/azure-sdk-tools/devtools_testutils/mgmt_settings_fake.py`][mgmt_settings_fake] to simulate authenticating with Azure. + +In live mode, the credentials need to be real so that the tests are able to connect to the service. Create a `.env` file at the root of the repository (in the same directory as the `sdk`, `tools`, `eng` folders). In this file you can define any environment variables you need for a test and that will be loaded by the `AzureTestCase` file. +1. Add the `SUBSCRIPTION_ID` variable with your organizations subscription ID. If you don't have it, you can find it in the "Overview" section of the "Subscriptions" blade in the [Azure portal](https://portal.azure.com/). +2. Define the `AZURE_TENANT_ID`, `AZURE_CLIENT_ID`, and `AZURE_CLIENT_SECRET` which are available after creating a Service Principal or can be retrieved from the Azure Portal if you have already created a Service Principal. If you do not have a Service Principal, check out the [Azure docs](https://docs.microsoft.com/cli/azure/ad/sp?view=azure-cli-latest#az_ad_sp_create_for_rbac) on a simple one line command to create one. The recommended practice is to include your alias or name in the Service Principal name. + Your `.env` file stores secrets in plain text so it is important that the contents of this file are not committed to the git repository. +3. 
Create the `tools/azure-sdk-tools/devtools_testutils/testsettings_local.cfg` file and copy and paste the following line: +``` +live-mode: true +``` + +## Create Live Test Resources +The Azure Python SDK library has two ways of providing live resources to our tests: +* Using an ArmTemplate and the PowerShellPreparer (we will demonstrate this one) + * [PowerShell preparer implementation](https://github.com/Azure/azure-sdk-for-python/blob/master/tools/azure-sdk-tools/devtools_testutils/powershell_preparer.py) + * [In line use](https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/schemaregistry/azure-schemaregistry/tests/test_schema_registry.py#L30-L39) for the schemaregistry library +* Using an individualized preparer such as the storage preparer + * [Storage preparer implementation](https://github.com/Azure/azure-sdk-for-python/blob/master/tools/azure-sdk-tools/devtools_testutils/storage_testcase.py) + * [In line use](https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/storage/azure-storage-blob/tests/test_blob_client.py#L49-L61) for the blob client + +If your library has a management plane library, you can build a preparer specific to your service using the storage preparer as an example. It is recommended that you use a PowerShellPreparer for new libraries and those without management plane libraries. The `PowerShellPreparer` uses the `New-TestResources.ps1` script to deploy resources using an ARM Template. This script and information about running it can be found in the [`eng/common/TestResources`](https://github.com/Azure/azure-sdk-for-python/tree/master/eng/common/TestResources#live-test-resource-management) directory. For more information about the engineering systems in Azure SDK, check out their [wiki][engsys_wiki] + +1. Create an Azure Resource Management Template for your specific service and the configuration you need. This can be done in the portal by creating the resources and at the very last step (Review + Create) clicking "Download a template for automation". Save this template to a `test-resources.json` file under the directory that contains your library (`sdk//test-resources.json`). +2. Use the [`New-TestResources.ps1`](https://github.com/Azure/azure-sdk-for-python/tree/master/eng/common/TestResources#on-the-desktop) script to deploy those resources. +3. Set the environment variables returned from step 2 in your current shell or add them to your `.env` file at the root of the repo to save these secrets. If you choose the latter method, you will have to make sure all the key-value pairs are in the format `=`, rather than the `${env:} = ''` formatting used in PowerShell. The names of the environment variables should be in all capital letters, snake case, and be prefixed with the library name. Ie. `TABLES_PRIMARY_KEY`, `FORMRECOGNIZER_ACCOUNT_URL`, `EVENTHUBS_SECRET_KEY`. +4. Create a partial implementation of the PowerShellPreparer to pass in your specific environment variables. An example implementation is shown below for schemaregistry + +```python +import functools +from devtools_testutils import PowerShellPreparer + +MyServicePreparer = functools.partial( + PowerShellPreparer, "", + schemaregistry_endpoint="fake_resource.servicebus.windows.net", + schemaregistry_group="fakegroup" +) +``` + +The parameters for the `functools.partial` method are: +* The `PowerShellPreparer` class +* The library folder that holds your code (ie. `sdk/schemaregistry`). 
This value is used to search your environment variables for the appropriate values. +* The remaining arguments are key-value kwargs, with the keys being the environment variables needed for the tests, and the value being a fake value for replacing the actual value in the recordings. The fake value in this implementation will replace the real value in the recording to make sure the secret keys are not committed to the recordings. These values should closely resemble the values because they are used in playback mode and will need to pass any client side validation. The fake value should also be a unique value to the other key-value pairs. + +## Write your tests + +In the `tests` directory create a file with the naming pattern `test_.py`. The base of each testing file will be roughly the same: + +```python +import functools +import pytest + +from devtools_testutils import AzureTestCase, PowerShellPreparer + +from azure.schemaregistry import SchemaRegistryClient + +SchemaRegistryPreparer = functools.partial( + PowerShellPreparer, 'schemaregistry', + schemaregistry_endpoint="fake_resouce.servicebus.windows.net", + schemaregistry_group="fakegroup" +) + +class TestSchemaRegistry(AzureTestCase): + +# Start with any helper functions you might need, for example a client creation method: + def create_schemareg_client(self, endpoint): + credential = self.get_credential(SchemaRegistryClient) + client = self.create_client_from_credential(SchemaRegistryClient, credential=credential, endpoint=endpoint) + return client + + ... + +# Write your tests + @SchemaRegistryPreparer() + def test_client_creation(self, schemaregistry_endpoint): + client = self.create_schemareg_client(schemaregistry_endpoint) + assert client is not None + +``` + +There's a lot going on in the example so we'll take this piece by piece: + +* Import everything you will need in your tests as normal, add to your imports the line `from devtools_testutils import AzureTestCase, PowerShellPreparer`. These two objects give our tests a lot of the desired powers. +* `AzureTestCase`: the test class should inherit from this object (`class TestSchemaRegistry(AzureTestCase)`), doing so sets up the recording infrastructure and the client creation methods. +* `PowerShellPreparer`: this preparer serves two purposes. + * First, it will provide the live keys we need to test our library against live resources. + * Second, it will keep those same live keys out of our recordings to make sure that we are not leaking our secrets into the recordings. +* At the top of your test class you should include any helper methods you will need. Most libraries will have a client creation method to eliminate repetitive code. +* Following your helper methods will be your actual tests. All test methods must start with "test". The preparer built at the top of the file should decorate your test in the fashion: `@MyPreparer()`. + * The signature of your test will always contain `self`, and following self will be all the keys that you need from your preparer. A test does not need to have every key passed into it, the test framework will take care of passing in only the parameters specifically requested in the test signature. + +If you need logging functionality for your testing, pytest also offers [logging](https://docs.pytest.org/en/stable/logging.html) capabilities either inline through the `caplog` fixture or with command line flags. + +## An example test +An example test for schemaregistry looks like: +```python +class SchemaRegistryTestCase(AzureTestCase): + + ... 
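+    # The decorated test below uses the preparer to inject the endpoint and group values,
+    # registers a schema against the service, checks the returned schema properties, and
+    # finally verifies that requesting a non-existent schema raises HttpResponseError.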
+ @SchemaRegistryPreparer() + def test_schema_basic(self, schemaregistry_endpoint, schemaregistry_group): + client = self.create_client(schemaregistry_endpoint) + schema_name = self.get_resource_name('test-schema-basic') + schema_str = """{"namespace":"example.avro","type":"record","name":"User","fields":[{"name":"name","type":"string"},{"name":"favorite_number","type":["int","null"]},{"name":"favorite_color","type":["string","null"]}]}""" + serialization_type = "Avro" + schema_properties = client.register_schema(schemaregistry_group, schema_name, serialization_type, schema_str) + + assert schema_properties.schema_id is not None + assert schema_properties.location is not None + assert schema_properties.location_by_id is not None + assert schema_properties.version is 1 + assert schema_properties.serialization_type == "Avro" + + with pytest.raises(HttpResponseError): + client.get_schema('a' * 32) +``` +The `AzureTestCase` class has the ability to define a client by passing in the client object and the account URL, without having to worry about identity. Test files should not import `azure.identity`, the `self.create_basic_client` will take care of loading environment variables and creating the default credentials. + +The test infrastructure heavily leverages the `assert` keyword, which tests if the condition following it is true, and if it is not the program will raise an `AssertionError`. When writing tests, any uncaught exception results in a failure, from an assert or from the code itself (ie. `TypeError`, `ValueError`, `HttpResponseError`, etc.). The assert statements are testing that all the exected properties of the returned object are not `None`, and the last two assert statements verify that the tested properties are a given value. The last two lines of the test use a [context manager](https://docs.python.org/3/library/contextlib.html) used from the `pytest` library that tests whether the following block of code will raise a certain exception. The `client.get_schema('a' * 32)` is expected to fail because it does not exist, and we expect this test to raise an error that is an instance of `HttpResponseError`. + +## Run and record the test + +From your terminal run the `pytest` command to run all the tests that you have written so far. + +```cmd +(env)azure-sdk-for-python\sdk\my-directory\my-library> pytest +``` + +Your update should run smooth and have green dots representing passing tests. Now if you look at the contents of your `tests` directory there should be a new directory called `recording` with four `.yaml` files. Each `yaml` file is a recording for a single test. To run a test in playback mode change the `testsettings_local.cfg` to `live-mode: false` and rerun the tests with the same command. The test infrastructure will use the automatically created `.yaml` recordings to mock the HTTP traffic and run the tests. + +### Purging Secrets + +The `yaml` files created from running tests in live mode store the request and response interactions between the library and the service and this can include authorization, account names, shared access signatures, and other secrets. The recordings are included in our public GitHub repository, making it important for us to remove any secrets from these recordings before committing them to the repository. There are two easy ways to remove secrets. The first is the `PowerShellPreparer` implementation, discussed above. This method will automatically purge the keys with the provided fake values. 
The second way is to use the `self.scrubber.register_name_pair(key, fake_key)` method (This method is a function of the base `AzureTestCase` class), which is used when a secret is dynamically created during a test. For example, [Tables](https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/tables/azure-data-tables/tests/_shared/cosmos_testcase.py#L86-L89) uses this method to replace storage account names with standard names. + +#### Special Case: Shared Access Signature + +Tests that use the Shared Access Signature (SAS) to authenticate a client should use the [`AzureTestCase.generate_sas`](https://github.com/Azure/azure-sdk-for-python/blob/master/tools/azure-sdk-tools/devtools_testutils/azure_testcase.py#L357-L370) method to generate the SAS and purge the value from the recordings. An example of using this method can be found [here](https://github.com/Azure/azure-sdk-for-python/blob/78650ba08523c14227ce8139cba5f4d1e6ed7956/sdk/tables/azure-data-tables/tests/test_table_entity.py#L1628-L1636). The method takes any number of positional arguments, with the first being the method that creates the SAS, and any number of keyword arguments (**kwargs). The method will be purged appropriately and allow for these tests to be run in playback mode. + +## Functional vs. Unit Tests + +The test written above is a functional test, it generates HTTP traffic and sends data to the service. Most of our clients have some client-side validation for account names, formatting, or properties that do not generate HTTP traffic. For unit tests, the best practice is to have a separate test class from the `AzureTestCase` class which tests client side validation methods. For example, the `azure-data-tables` library has client-side validation for the table name and properties of the entity, below is an example of how these could be tested: + +```python +import pytest +from azure.data.tables import TableServiceClient, EntityProperty, EdmType + +class TestTablesUnitTest(object): + + def test_invalid_table_name(self): + account_name = 'fake_account_name' + account_key = 'fake_account_key1234567890' + tsc = TableServiceClient( + account_url='https://{}.table.core.windows.net/'.format(account_name), + credential=account_key + ) + + invalid_table_name = "bad_table_name" # table name cannot have an '_' character + + with pytest.raises(ValueError): + tsc.create_table(invalid_table_name) + + def test_entity_properties(self): + ep = EntityProperty('abc', EdmType.STRING) + ep = EntityProperty(b'abc', EdmType.BINARY) + ep = EntityProperty(1.2345, EdmType.DOUBLE) + + with pytest.raises(ValueError): + ep = EntityProperty(2 ** 75, EdmType.Int64) # Tables can only handle integers up to 2 ^ 63 +``` + +Async tests need to be marked with a `@pytest.mark.asyncio` to be properly handled. 
For example: +```python +import pytest +from azure.data.tables.aio import TableServiceClient + +class TestTablesUnitTest(object): + + @pytest.mark.asyncio + async def test_invalid_table_name(self): + account_name = 'fake_account_name' + account_key = 'fake_account_key1234567890' + tsc = TableServiceClient( + account_url='https://{}.table.core.windows.net/'.format(account_name), + credential=account_key + ) + + invalid_table_name = "bad_table_name" # table name cannot have an '_' character + + with pytest.raises(ValueError): + await tsc.create_table(invalid_table_name) +``` + + +## More Test Examples + +This section will demonstrate how to write tests with the `devtools_testutils` package with a few samples to showcase the features of the test framework. + +For more information, refer to the [advanced tests notes][advanced_tests_notes] on more advanced scenarios and additional information. + + + +[advanced_tests_notes]: https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/tests-advanced.md +[azure_devtools]: https://pypi.org/project/azure-devtools/ +[engsys_wiki]: https://dev.azure.com/azure-sdk/internal/_wiki/wikis/internal.wiki/48/Create-a-new-Live-Test-pipeline?anchor=test-resources.json +[mgmt_settings_fake]: https://github.com/Azure/azure-sdk-for-python/blob/master/tools/azure-sdk-tools/devtools_testutils/mgmt_settings_fake.py +[packaging]: https://github.com/Azure/azure-sdk-for-python/blob/master/doc/dev/packaging.md diff --git a/doc/eng_sys_checks.md b/doc/eng_sys_checks.md new file mode 100644 index 000000000000..ed1e934530b2 --- /dev/null +++ b/doc/eng_sys_checks.md @@ -0,0 +1,211 @@ +# Azure SDK for Python - Engineering System + +There are various tests currently enabled in Azure pipeline for Python SDK and some of them are enabled only for nightly CI checks. We also run some static analysis tool to verify code completeness, security and lint check. + +Check the [contributing guide](https://github.com/Azure/azure-sdk-for-python/blob/master/CONTRIBUTING.md#building-and-testing) for an intro to `tox`. + +As a contributor, you will see the build jobs run in two modes: `Nightly Scheduled` and `Pull Request`. + +These utilize the _same build definition_, except that the `nightly` builds run additional, deeper checks that run for a bit longer. + +Example PR build: + +![res/job_snippet.png](res/job_snippet.png) + + - `Analyze` tox envs run during the `Analyze job. + - `Test _` runs PR/Nightly tox envs, depending on context. + +## Skipping a tox test environment at queue time + +All build definitions allow choice at queue time as to which `tox` environments actually run during the test phase. + +1. Find your target service `internal` build. +2. Click `Run New` +3. Before clicking `run` against `master` or your target commit, click `Variables` and add a variable. The value should be a comma separated list of tox environments that you want to run in the test phase. +4. Once it's set, run the build! + +This is an example setting of that narrows the default set from `whl, sdist, depends, latestdependency, minimumdependency`. + +![res/queue_time_variable.png](res/queue_time_variable.png) + +Any combination of valid valid tox environments will work. Reference either this document or the file present at `eng/tox/tox.ini` to find what options are available. + +## Analyze Checks +Analyze job in both nightly CI and pull request validation pipeline runs a set of static analysis using external and internal tools. 
Following are the list of these static analysis. + +#### MyPy +`Mypy` is a static analysis tool that runs type checking of python package. Following are the steps to run `MyPy` locally for a specific package +1. Go to root of the package +2. Execute following command + ```tox -e mypy -c ../../../eng/tox/tox.ini ``` + +#### Pylint +`Pylint` is a static analysis tool to run lint checking. Following are the steps to run `pylint` locally for a specific package. + +1. Go to root of the package. +2. Execute following command + ```tox -e pylint -c ../../../eng/tox/tox.ini``` + + +#### Bandit +`Bandit` is static security analysis tool. This check is triggered for all Azure SDK package as part of analyze job. Following are the steps to `Bandit` tool locally for a specific package. + +1. Got to package root directory. +2. Execute following command + ```tox -e bandit -c ../../../eng/tox/tox.ini``` + + +#### ApiStubGen +`ApiStubGen` is an internal tool used to create API stub to help reviewing public APIs in our SDK package using [`APIViewTool`.](https://apiview.dev/) This tool also has some built in lint checks available and purpose of having this step in analyze job is to ensure any change in code is not impacting stubbing process and also to have more and more custom lint checks added in future. + +#### Change log verification + +Change log verification is added to ensure package has valid change log for current version. Guidelines to properly maintain the change log is documented [here](https://github.com/Azure/azure-sdk-for-python/blob/master/doc/) + +## PR Validation Checks +Each pull request runs various tests using `pytest` in addition to all the tests mentioned above in analyze check. Pull request validation performs 3 different types of test: `whl, sdist and depends`. The following section explains the purpose of each of these tests and how to execute them locally. All pull requests are validated on multiple python versions across different platforms. Find the test matrix below. + + +|`Python Version`|`Platform` | +|--|--| +|2.7|Linux| +|3.5|Windows| +|3.8|Linux| + +### PR validation tox test environments +Tests are executed using tox environment and following are the tox test names that are part of pull request validation +#### whl +This test installs wheel of the package being tested and runs all tests cases in the package using `pytest`. Following is the command to run this test environment locally. + +1. Go to package root folder on a command line +2. Run following command + ``tox -e whl -c ../../../eng/tox/tox.ini`` + +#### sdist +This test installs sdist of the package being tested and runs all tests cases in the package using `pytest`. Following is the command to run this test environment locally. + +1. Go to package root folder on a command line +2. Run following command + ``tox -e sdist -c ../../../eng/tox/tox.ini`` + +#### depends +The `depends` check ensures all modules in a target package can be successfully imported. Actually installing and importing will verify that all package requirements are properly set in setup.py and that the `__all__` set for the package is properly defined. This test installs the package and its required packages, then executes `from import *`. For example from `azure-core`, the following would be invoked: `from azure.core import *`. + +Following is the command to run this test environment locally. + +1. Go to package root folder on a command line +2. 
Run the following command + ``tox -e depends -c ../../../eng/tox/tox.ini`` + + +## Nightly CI Checks + +Nightly continuous integration checks run all tests mentioned above in the Analyze and Pull Request checks, in addition to several other tests. Nightly CI checks run on all Python versions that are supported by Azure SDK packages, across multiple platforms. + +![res/full_matrix.png](res/full_matrix.png) + +Regression also executes: +![res/regression.png](res/regression.png) + +The nightly CI check runs the following additional tests to verify the dependencies between a package being developed and already released packages, to ensure backward compatibility. Following is an explanation of why we need dependency tests to ensure backward compatibility. + +Imagine a situation where package `XYZ` requires another package `ABC`, and as per the package requirement of `XYZ`, it should work with any version between 1.0 and 2.0 of package `ABC`. + +Package `XYZ` requires package `ABC` + +As developers of package `XYZ`, we need to ensure that our package works fine with all versions of `ABC` as long as they are within the package requirement specification. + +There is another scenario where a regression test (reverse dependency test) is required. Let's take the same example above and assume we are the developers of package `ABC`, which is taken as a required package by another package `XYZ`. + +Package `ABC` is required by package `XYZ` + + +As developers of `ABC`, we need to ensure that any new change in `ABC` does not break the use of `XYZ`, and hence ensure backward compatibility. + +Let's take a few Azure SDK packages instead of dummy names to explain this in a context we are more familiar with. + +Most Azure SDK packages require `azure-core`, and this requirement is within a range; for example, `azure-storage-blob` requires `azure-core >1.0.0, <2.0.0`. So any new change in azure-storage-blob needs to work with all versions of azure-core within that range. +Similarly, any new version of azure-core needs to remain compatible with all released package versions that take azure-core as a required package. + +That is a lot of combinations if we were to run tests against every released version within the range of the requirement specification. In order to reduce the test matrix while still ensuring quality, we currently run the tests using the oldest and latest released packages and skip any versions in between. + +Following are the additional tests we run during nightly CI checks. + +#### Latest Dependency Test + +This test makes sure that the package being developed works correctly with the latest released version of a required Azure SDK package, as long as there is a released version that satisfies the requirement specification. The workflow of this test is as follows: + +1. Identify whether any Azure SDK package is marked as a required package in the setup.py of the current package being tested. +Note: Any dependency mentioned only in dev_requirements is not considered when identifying dependencies. +2. Identify the latest released version of the required Azure SDK package on PyPI +3. Install the latest released version of the required package instead of the dev dependency on the package in the code repo +4. Install the current package that is being tested +5. Run pytest for all test cases in the current package + +The tox name of this test is `latestdependency` and the steps to manually run this test locally are as follows. +1. Go to the package root, e.g. azure-storage-blob or azure-identity +2.
Run the following command + `tox -e latestdependency -c ../../../eng/tox/tox.ini` + + +#### Minimum Dependency Test + +This test makes sure that the package being developed works correctly with the oldest released version of a required Azure SDK package, as long as there is a released version that satisfies the requirement specification. The workflow of this test is as follows: + +1. Identify whether any Azure SDK package is marked as a required package in the setup.py of the current package being tested. +Note: Any dependency mentioned only in dev_requirements is not considered when identifying dependencies. +2. Identify the oldest released version of the required Azure SDK package on PyPI +3. Install the oldest released version of the required package instead of the dev dependency on the package in the code repo +4. Install the current package that is being tested +5. Run pytest for all test cases in the current package + +The tox name of this test is `mindependency` and the steps to manually run this test locally are as follows. +1. Go to the package root, e.g. azure-storage-blob or azure-identity +2. Run the following command +`tox -e mindependency -c ../../../eng/tox/tox.ini` + + +#### Regression Test + +As mentioned earlier, the regression test, or reverse dependency test, is added to avoid a regression scenario for customers when a new change is made in a package that is required by other packages. Currently, only a few Azure SDK packages are taken as required packages by other Azure SDK packages. As of now, the list of these required packages is: +`azure-core` +`azure-eventhub` +`azure-storage-blob` + +Our regression framework automatically finds any such package that is taken as a required package, so this list is not hardcoded. + +We have two different sets of regression tests to verify regression scenarios against the oldest and latest released dependent packages. +• Regression using the latest released dependent package +• Regression using the oldest released dependent package + +One main difference between the regression tests and the forward dependency tests (latestdependency and mindependency) is which test cases are executed. While the forward dependency tests execute the test cases in the current code repo, the regression tests execute the tests that were part of the repo at the time of the dependent package release. To make this clearer, let's look at an example. + +Let's assume that we are testing regression for azure-core against the latest released dependent packages. The test will identify all packages that take azure-core as a required package and find the latest released version of each of those packages. The test framework installs the azure-core currently being developed along with the latest released dependent package, and runs the test cases in the dependent package, e.g. azure-identity, that were part of the repo at the time the dependent package was released. + +The workflow of this test is as follows when running regression for an SDK package. +1. Identify any packages that take the package currently being tested as a required package +2. Find the latest and oldest released versions of the dependent package from PyPI +3. Install the currently developed version of the package we are testing regression for, e.g. azure-core +4. Check out the release tag of the dependent package from GitHub +5. Install the latest/oldest version of the dependent package, e.g. azure-identity +6. Run the test cases within the dependent package from the checked-out branch. + + +Steps to manually run the regression test locally: +1. Run the command below from your git code repo to generate the wheel of the package being developed.
Currently we have restricted to have prebuilt wheel. +`./scripts/devops_tasks/build_packages.py --service= -d ` +2. Run below command to start regression test locally +`./scripts/devops_tasks/test_regression.py azure-* --service= --whl-dir=` + + +How to run these additional tests on azure pipelines manually + +Following variables can be set at queueing time in order to run these additional tests which are by default run only for scheduled runs. + +• Latest and oldest dependency test in addition to basic testing +Variable name: `Run.DependencyTest` +Value: true + +• Regression test +Variable name: `Run.Regression` +Value: true diff --git a/doc/res/full_matrix.png b/doc/res/full_matrix.png new file mode 100644 index 000000000000..7decc5e8cd01 Binary files /dev/null and b/doc/res/full_matrix.png differ diff --git a/doc/res/job_snippet.png b/doc/res/job_snippet.png new file mode 100644 index 000000000000..e4d7c47a391a Binary files /dev/null and b/doc/res/job_snippet.png differ diff --git a/doc/res/queue_time_variable.png b/doc/res/queue_time_variable.png new file mode 100644 index 000000000000..78184863b666 Binary files /dev/null and b/doc/res/queue_time_variable.png differ diff --git a/doc/res/regression.png b/doc/res/regression.png new file mode 100644 index 000000000000..747008591b5a Binary files /dev/null and b/doc/res/regression.png differ diff --git a/doc/sphinx/conf.py b/doc/sphinx/conf.py index 346db4d9a265..cc7cfc28f841 100644 --- a/doc/sphinx/conf.py +++ b/doc/sphinx/conf.py @@ -67,8 +67,8 @@ 'trio': ('https://trio.readthedocs.io/en/stable/', None), 'msal': ('https://msal-python.readthedocs.io/en/latest/', None), # Azure packages - 'azure-core': ('https://azuresdkdocs.blob.core.windows.net/$web/python/azure-core/1.1.1/', None), - 'azure-identity': ('https://azuresdkdocs.blob.core.windows.net/$web/python/azure-identity/1.1.0/', None), + 'azure-core': ('https://azuresdkdocs.blob.core.windows.net/$web/python/azure-core/latest/', None), + 'azure-identity': ('https://azuresdkdocs.blob.core.windows.net/$web/python/azure-identity/latest/', None), } autodoc_member_order = 'groupwise' diff --git a/doc/sphinx/index.rst b/doc/sphinx/index.rst index cdca4ef41e72..e426a8fcc2d3 100644 --- a/doc/sphinx/index.rst +++ b/doc/sphinx/index.rst @@ -65,6 +65,8 @@ section of the project. installation quickstart_authentication + mgmt_quickstart + python_mgmt_migration_guide multicloud exceptions Service Management (Legacy) diff --git a/doc/sphinx/mgmt_quickstart.rst b/doc/sphinx/mgmt_quickstart.rst new file mode 100644 index 000000000000..98002433ec09 --- /dev/null +++ b/doc/sphinx/mgmt_quickstart.rst @@ -0,0 +1,271 @@ +Quickstart Tutorial - Resource Management +=============================================================== + +We are excited to announce that a new set of management libraries are now generally available. +Those packages share a number of new features such as Azure Identity support, +HTTP pipeline, error-handling.,etc, and they also follow the new Azure SDK guidelines which +create easy-to-use APIs that are idiomatic, compatible, and dependable. + +You can find the details of those new libraries `here `__ + +In this basic quickstart guide, we will walk you through how to +authenticate to Azure using the new libraries and start interacting with +Azure resources. There are several possible approaches to +authentication. 
This document illustrates the most common scenario + +Migration Guide +--------------- +If you are an existing user of the older version of Azure management library for Python and you are looking for a migration guide to the new version of the SDK, please refer to `this migration guide here `__ + +Prerequisites +------------- + +| You will need the following values to authenticate to Azure + +- **Subscription ID** +- **Client ID** +- **Client Secret** +- **Tenant ID** + +These values can be obtained from the portal, here's the instructions: + +Get Subscription ID +^^^^^^^^^^^^^^^^^^^ + +1. Login into your Azure account +2. Select Subscriptions in the left sidebar +3. Select whichever subscription is needed +4. Click on Overview +5. Copy the Subscription ID + +Get Client ID / Client Secret / Tenant ID +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +For information on how to get Client ID, Client Secret, and Tenant ID, please refer to `this document `__ + +Setting Environment Variables +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +After you obtained the values, you need to set the following values as +your environment variables + +- ``AZURE_CLIENT_ID`` +- ``AZURE_CLIENT_SECRET`` +- ``AZURE_TENANT_ID`` +- ``AZURE_SUBSCRIPTION_ID`` + +To set the following environment variables on your development system: + +Windows (Note: Administrator access is required) + +1. Open the Control Panel +2. Click System Security, then System +3. Click Advanced system settings on the left +4. Inside the System Properties window, click the Environment Variables… button. +5. Click on the property you would like to change, then click the Edit… button. If the property name is not listed, then click the New… button. + +Linux-based OS +:: + + export AZURE_CLIENT_ID="__CLIENT_ID__" + export AZURE_CLIENT_SECRET="__CLIENT_SECRET__" + export AZURE_TENANT_ID="__TENANT_ID__" + export AZURE_SUBSCRIPTION_ID="__SUBSCRIPTION_ID__" + +Authentication and Creating Management Client +------------------------------------------------------ + +Now that the environment is setup, all you need to do is to create an +authenticated client. Our default option is to use +**DefaultAzureCredential** and in this guide we have picked +**Resources** as our target service, but you can set it up similarly for any other service that you are using. + +To authenticate to Azure and create +a management client, simply do the following: + +:: + + import azure.mgmt.resource + import azure.mgmt.network + from azure.identity import DefaultAzureCredential + ... + subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID") + credential = DefaultAzureCredential() + resource_client = azure.mgmt.resource.ResourceManagementClient(credential=credential, subscription_id=subscription_id) + network_client = azure.mgmt.network.NetworkManagementClient(credential=credential, subscription_id=subscription_id) + +More detailed information and different authentication approaches using Azure Identity can be found in +`this document `__ + +Managing Resources +------------------ + +Now that we are authenticated, we can use the Resource client (azure.mgmt.resource.ResourceManagementClient) we have created to perform operations on Resource Group. In this example, we will show to manage Resource Groups. 
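+
+The listing example below calls a small ``print_resource_group`` helper. That helper is not part of the SDK; a minimal sketch of it, printing a few fields of the returned ``ResourceGroup`` model, could be:
+
+::
+
+    def print_resource_group(group):
+        # ResourceGroup models expose id, name, location and tags attributes.
+        print("Resource group: {} (location: {}, tags: {})".format(
+            group.name, group.location, group.tags
+        ))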
+ +**Create a resource group** + +:: + + location = "westus2" + group_name = "my_resource_group_name" + group = resource_client.resource_groups.create_or_update( + group_name, + {'location': location} + ) + +**Update a resource group** + +:: + + group_name = "my_resource_group_name" + group.tags = { + "environment":"test", + "department":"tech" + } + updated_group = resource_client.resource_groups.create_or_update(group_name, group) + +**List all resource groups** + +:: + + group_list = self.resource_client.resource_groups.list() + for g in group_list: + print_resource_group(g) + +**Delete a resource group** + +:: + + delete_async_op = resource_client.resource_groups.begin_delete(group_name) + delete_async_op.wait() + +Managing Network +------------------ +We can use the Network client (azure.mgmt.resource.NetworkManagementClient) we have created to perform operations on Network related resources. In this example, we will show how to manage Public IP Addresses. + +**Create a Network Public IP Address** + +:: + + GROUP_NAME = "testgroup" + PUBLIC_IP_ADDRESS = "public_ip_address_name" + + # Create Resource Group + resource_client.resource_groups.create_or_update( + GROUP_NAME, + {"location": "eastus"} + ) + + # Create Public IP Address + public_ip_address = network_client.public_ip_addresses.begin_create_or_update( + GROUP_NAME, + PUBLIC_IP_ADDRESS, + { + "location": "eastus" + } + ).result() + print("Create Public IP Address:\n{}".format(public_ip_address)) + +**Get a Network Public IP Address** + +:: + + public_ip_address = network_client.public_ip_addresses.get( + GROUP_NAME, + PUBLIC_IP_ADDRESS + ) + print("Get Public IP Address:\n{}".format(public_ip_address)) + +**Update tags in Network Public IP Address** + +:: + + # Update Public IP Address + public_ip_address = network_client.public_ip_addresses.update_tags( + GROUP_NAME, + PUBLIC_IP_ADDRESS, + { + "tags": { + "tag1": "value1", + "tag2": "value2" + } + } + ) + print("Updated Public IP Address \n{}".format(public_ip_address)) + +**Delete a Network Public IP Address** + +:: + + # Delete Public IP Address + public_ip_address = network_client.public_ip_addresses.begin_delete( + GROUP_NAME, + PUBLIC_IP_ADDRESS + ).result() + print("Delete Public IP Address.\n") + +Async and sync operations +------------------------- +In python>=3.5, Azure Python SDK provides the choice for user to use the asynchronous client for asynchronous programming. 
+ +Note that asyncio in Windows is underpowered and please take caution when using async operations on Windows systems + +**Create Async Management Client** +:: + + from azure.identity.aio import DefaultAzureCredential + from azure.mgmt.network.aio import NetworkManagementClient + from azure.mgmt.resource.resources.aio import ResourceManagementClient + + SUBSCRIPTION_ID = os.environ.get("SUBSCRIPTION_ID", None) + credential = DefaultAzureCredential() + resource_client = ResourceManagementClient( + credential=credential, + subscription_id=SUBSCRIPTION_ID + ) + network_client = NetworkManagementClient( + credential=credential, + subscription_id=SUBSCRIPTION_ID + ) + +**Create a Network Public IP Address Async** +:: + + GROUP_NAME = "testgroup" + PUBLIC_IP_ADDRESS = "public_ip_address_name" + + # Create Resource Group + await resource_client.resource_groups.create_or_update( + GROUP_NAME, + {"location": "eastus"} + ) + + # Create Public IP Address + async_poller = await network_client.public_ip_addresses.begin_create_or_update( + GROUP_NAME, + PUBLIC_IP_ADDRESS, + { + "location": "eastus" + } + ) + public_ip_address = await async_poller.result() + print("Create Public IP Address:\n{}".format(public_ip_address)) + +Code Samples +------------------------- +For more code samples that demonstrate how to use our SDK to interact with Azure services, please visit `here `__. You can also view the Github repo that contains the code samples `here `__ + +Need help? +---------- +- File an issue via `Github Issues `__ +- Check `previous questions `__ or ask new ones on StackOverflow using azure and python tags. + +Contributing +------------ +For details on contributing to this repository, see the contributing guide. + +This project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit https://cla.microsoft.com. + +When you submit a pull request, a CLA-bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repositories using our CLA. + +This project has adopted the Microsoft Open Source Code of Conduct. For more information see the Code of Conduct FAQ or contact opencode@microsoft.com with any additional questions or comments. 
diff --git a/doc/sphinx/package_service_mapping.json b/doc/sphinx/package_service_mapping.json index 659cca5c6a4e..aaa3c2ed9e38 100644 --- a/doc/sphinx/package_service_mapping.json +++ b/doc/sphinx/package_service_mapping.json @@ -159,6 +159,26 @@ "category": "Other", "service_name": "Other" }, + "azure-communication-administration": { + "category": "Client", + "service_name": "Communication", + "manually_generated": true + }, + "azure-communication-chat": { + "category": "Client", + "service_name": "Communication", + "manually_generated": true + }, + "azure-communication-sms": { + "category": "Client", + "service_name": "Communication", + "manually_generated": true + }, + "azure-mgmt-communication": { + "category": "Management", + "service_name": "Communication", + "manually_generated": true + }, "azure-eventgrid": { "category": "Client", "namespaces": [ @@ -896,11 +916,21 @@ "service_name": "Form Recognizer", "manually_generated": true }, + "azure-ai-metricsadvisor": { + "category": "Client", + "service_name": "Metrics Advisor", + "manually_generated": true + }, "azure-storage-blob": { "category": "Client", "service_name": "Storage", "manually_generated": true }, + "azure-storage-blob-changefeed": { + "category": "Client", + "service_name": "Storage", + "manually_generated": true + }, "azure-storage-file-share": { "category": "Client", "service_name": "Storage", diff --git a/doc/sphinx/python_mgmt_migration_guide.rst b/doc/sphinx/python_mgmt_migration_guide.rst new file mode 100644 index 000000000000..1856a17a3908 --- /dev/null +++ b/doc/sphinx/python_mgmt_migration_guide.rst @@ -0,0 +1,170 @@ +Migration Guide - Resource Management +===================================== + +Guide for migrating to the next generation of Azure Python SDK for Management Libraries +--------------------------------------------------------------------------------------- + +This document is intended for users that are familiar with an older +version of the Python SDK for managment libraries and wish to migrate +their application to the next version of Azure resource management +libraries + +For users new to the Python SDK for resource management libraries, +please see the `quickstart +guide `__ + +Table of contents +----------------- + +- `Prerequisites <#prerequisites>`__ +- `Updated Python Packages <#updated-python-packages>`__ +- `General Changes <#general-changes>`__ +- `Authentication <#authentication>`__ +- `Client API Changes <#client-api-changes>`__ +- `Additional Samples <#additional-samples>`__ + +Prerequisites +------------- + +- Active Azure subscription +- Python 2.7 or 3.5+ + +Updated Python Packages +----------------------- + +You can refer to the `this +site `__ +to see all the new Python packages. + +For packages that are already generally available (GA), you can directly install the package using pip. Take Azure Compute +service for example, simply do: +``pip install azure-mgmt-compute`` + +The release history for azure-mgmt-compute can be found at `https://pypi.org/project/azure-mgmt-compute/#history `__ + +You will notice that there was a beta release in release history, and the changelog for this version mentioned that "this version uses a next-generation code generator that introduces important breaking changes". This indicates the package is based on the new generator. + +In addition, some next-generation Python SDK management client libraries might still in Public Preview. 
The preview version of the SDK will contain a ``b`` in its version number to indicate that it is a beta release (e.g. ``10.0.0b1``). + +For those beta releases, please install the package based on the beta version number; for +example, to install the latest preview package for BetaServiceExample, please use: +``pip install azure-mgmt-beta-service-example==10.0.0b1`` + +General Changes +--------------- + +The latest Azure Python SDK for management libraries is a result of our +efforts to create a resource management client library that is +user-friendly and idiomatic to the Python ecosystem. + +While conforming to the `new Azure SDK Design Guidelines for +Python `__, +we have tried our best to minimize the breaking changes. Most of the API +signatures have stayed the same to offer users an easier migration +experience. + +The important breaking changes are listed in the following sections: + +Authentication +~~~~~~~~~~~~~~ + +In the old version, ``ServicePrincipalCredentials`` in ``azure.common`` is +used for authenticating to Azure and creating a service client. + +In the new version, in order to provide unified authentication based on +Azure Identity for all Azure SDKs, the authentication mechanism has been +re-designed and replaced by the ``azure-identity`` library. + +To use the new ``azure-identity`` authentication mechanism, please use +``pip install azure-identity`` to install the package. + +To show the code snippets for the change: + +**In old version** + +.. code:: python + + import azure.mgmt.compute + from azure.common.credentials import ServicePrincipalCredentials + + credentials = ServicePrincipalCredentials( + client_id=client_id, + secret=client_secret, + tenant=tenant_id + ) + compute_client = azure.mgmt.compute.ComputeManagementClient(credentials=credentials, subscription_id=self.subscription_id) + +**Equivalent in new version** + +.. code:: python + + import azure.mgmt.compute + from azure.identity import ClientSecretCredential + + credential = ClientSecretCredential( + tenant_id=tenant_id, + client_id=client_id, + client_secret=client_secret + ) + compute_client = azure.mgmt.compute.ComputeManagementClient(credential=credential, subscription_id=self.subscription_id) + +For detailed information on the benefits of using the new authentication +classes as well as all available authentication options, please refer to `this +page `__ + +Client API Changes +------------------ + +Most of the API has stayed the same to provide an easier migration +experience. There is a minor change regarding the async operations. + +Async Operations Change +~~~~~~~~~~~~~~~~~~~~~~~ + +To differentiate between asynchronous and synchronous API operations in +the new version, an explicit ``begin_`` prefix is added for all the +async API operations (this includes operations where the user gets a +``202`` response code or needs to call ``.result()`` explicitly to get +the response). + +To show an example (creating a virtual machine): + +**In old version** + +.. code:: python + + result = self.compute_client.virtual_machines.create_or_update( + group_name, + vm_name, + parameters + ) + result = result.result() + +**Equivalent in new version** + +..
code:: python + + result = self.compute_client.virtual_machines.begin_create_or_update( + group_name, + vm_name, + parameters + ) + vm = result.result() + +Additional Samples +------------------ + +More samples can be found at : + +- `Quickstart for new version of SDK `__ +- `Code Samples for Resource Management Libraries `__ +- `Authentication Documentation `__ + +Need help? +---------- + +If you have encountered an issue during migration, please file an issue +via `Github +Issues `__ and +make sure you add the "Preview" label to the issue + diff --git a/eng/.docsettings.yml b/eng/.docsettings.yml index 6fa386df06c5..df5122f83d4b 100644 --- a/eng/.docsettings.yml +++ b/eng/.docsettings.yml @@ -8,6 +8,8 @@ omitted_paths: - doc/* - sdk/**/samples/* - sdk/identity/azure-identity/tests/* + - sdk/**/tests/perfstress_tests/* + language: python root_check_enabled: True required_readme_sections: @@ -46,7 +48,12 @@ known_content_issues: - ['sdk/synapse/azure-synapse-accesscontrol/README.md', '#4554'] - ['sdk/synapse/azure-synapse-spark/README.md', '#4554'] - ['sdk/synapse/azure-synapse-artifacts/README.md', '#4554'] + - ['sdk/synapse/azure-synapse-monitoring/README.md', '#4554'] + - ['sdk/synapse/azure-synapse-managedprivateendpoints/README.md', '#4554'] - ['sdk/synapse/azure-synapse-nspkg/README.md', '#4554'] + - ['sdk/security/azure-security-nspkg/README.md', '#4554'] + - ['sdk/anomalydetector/azure-ai-anomalydetector/README.md', '#4554'] + - ['sdk/metricsadvisor/azure-ai-metricsadvisor/README.md', '#4554'] - ['sdk/applicationinsights/azure-applicationinsights/README.md', '#4554'] - ['sdk/batch/azure-batch/README.md', '#4554'] - ['sdk/cognitiveservices/azure-cognitiveservices-anomalydetector/README.md', '#4554'] @@ -81,8 +88,10 @@ known_content_issues: - ['sdk/eventgrid/azure-eventgrid/README.md', '#4554'] - ['sdk/graphrbac/azure-graphrbac/README.md', '#4554'] - ['sdk/loganalytics/azure-loganalytics/README.md', '#4554'] + - ['sdk/schemaregistry/azure-schemaregistry/swagger/README.md', '#4554'] - ['sdk/servicebus/azure-servicebus/README.md', '#4554'] - ['sdk/servicebus/azure-servicebus/swagger/README.md', '#4554'] + - ['sdk/servicebus/azure-servicebus/tests/perf_tests/README.md', '#4554'] - ['sdk/servicefabric/azure-servicefabric/README.md', '#4554'] - ['sdk/storage/azure-storage-nspkg/README.md', '#4554'] - ['sdk/storage/azure-storage-blob/swagger/README.md', '#4554'] @@ -93,9 +102,15 @@ known_content_issues: - ['sdk/storage/azure-storage-queue/swagger/README.md', '#4554'] - ['sdk/storage/README.md', '#4554'] - ['sdk/textanalytics/azure-ai-textanalytics/samples/README.md', '#4554'] + - ['sdk/monitor/azure-monitor-opentelemetry-exporter/swagger/README.md', '#4554'] + - ['sdk/monitor/azure-monitor-opentelemetry-exporter/README.md', '#4554'] + - ['sdk/digitaltwins/azure-digitaltwins-core/swagger/README.md', '#4554'] + - ['sdk/textanalytics/azure-ai-textanalytics/swagger/README.md', '#4554'] + - ['sdk/media/azure-media-nspkg/README.md', '#4554'] + - ['sdk/containerregistry/azure-containerregistry/swagger/README.md', '#4554'] # nspckg and common. 
- - ['sdk/appconfiguration/azure-appconfiguration/README.md', 'nspkg and common'] + - ['sdk/appconfiguration/azure-appconfiguration/README.md', 'nspkg and common'] - ['sdk/appconfiguration/azure-appconfiguration/samples/README.md', 'nspkg and common'] - ['sdk/cognitiveservices/azure-cognitiveservices-knowledge-nspkg/README.rst', 'nspkg and common'] - ['sdk/cognitiveservices/azure-cognitiveservices-language-nspkg/README.rst', 'nspkg and common'] @@ -111,18 +126,24 @@ known_content_issues: - ['sdk/core/azure-mgmt-nspkg/README.rst', 'nspkg and common'] - ['sdk/core/azure-nspkg/README.rst', 'nspkg and common'] - ['sdk/keyvault/azure-keyvault-nspkg/README.md', 'nspkg and common'] + - ['sdk/mixedreality/azure-mixedreality-nspkg/README.md', 'nspkg and common'] - ['sdk/search/azure-search-nspkg/README.md', 'nspkg and common'] + - ['sdk/search/azure-search-documents/README.md', 'nspkg and common'] - ['sdk/storage/azure-storage-blob/samples/README.md', 'nspkg and common'] - ['sdk/storage/azure-storage-file-datalake/samples/README.md', 'nspkg and common'] - ['sdk/storage/azure-storage-blob-changefeed/samples/README.md', 'nspkg and common'] - ['sdk/storage/azure-storage-file-share/samples/README.md', 'nspkg and common'] - ['sdk/storage/azure-storage-queue/samples/README.md', 'nspkg and common'] - ['sdk/textanalytics/azure-ai-nspkg/README.md', 'nspkg and common'] + - ['sdk/translation/azure-ai-translation-nspkg/README.md', 'nspkg and common'] - ['sdk/eventhub/azure-eventhub/samples/README.md', 'nspkg and common'] - ['sdk/tables/azure-data-nspkg/README.md', 'nspkg and common'] - -# HISTORY.rst - - ['sdk/core/azure/HISTORY.rst','#4554'] + - ['sdk/cosmos/README.md', 'nspkg and common'] + - ['sdk/communication/azure-communication-nspkg/README.md', 'nspkg and common'] + - ['sdk/iot/azure-iot-nspkg/README.md', 'nspkg and common'] + # HISTORY.rst + - ['sdk/core/azure/HISTORY.rst','nspkg and common'] + - ['sdk/digitaltwins/azure-digitaltwins-nspkg/README.md', 'nspkg and common'] # root readme - ['README.md', 'root readme'] diff --git a/eng/CHECKENFORCER b/eng/CHECKENFORCER index 7b519b060425..ee7ed7854a57 100644 --- a/eng/CHECKENFORCER +++ b/eng/CHECKENFORCER @@ -1,2 +1,58 @@ format: v0.1-alpha minimumCheckRuns: 1 +timeout: 5 +message: > + This pull request is protected by [Check Enforcer](https://aka.ms/azsdk/check-enforcer). + + # What is Check Enforcer? + + Check Enforcer helps ensure all pull requests are covered by at least one + check-run (typically an Azure Pipeline). When all check-runs associated + with this pull request pass then Check Enforcer itself will pass. + + # Why am I getting this message? + + You are getting this message because Check Enforcer did not detect any + check-runs being associated with this pull request within five minutes. This + may indicate that your pull request is not covered by any pipelines and so + Check Enforcer is correctly blocking the pull request being merged. + + # What should I do now? + + If the **check-enforcer** check-run is not passing and all other check-runs + associated with this PR are passing (excluding _license-cla_) then you could + try telling _Check Enforcer_ to evaluate your pull request again. You can + do this by adding a comment to this pull request as follows: + + ``` + /check-enforcer evaluate + ``` + + Typically evaulation only takes a few seconds. 
If you know that your pull + request is not covered by a pipeline and this is expected you can override + Check Enforcer using the following command: + + ``` + /check-enforcer override + ``` + + Note that using the override command triggers alerts so that follow-up + investigations can occur (PRs still need to be approved as normal). + + # What if I am onboarding a new service? + + Often, new services do not have validation pipelines associated with them, + in order to bootstrap pipelines for a new service, you can issue the following + command as a pull request comment: + + ``` + /azp run prepare-pipelines + ``` + + This will run a pipeline that analyzes the source tree and creates the + pipelines necessary to build and validate your pull request. Once the pipeline + has been created you can trigger the pipeline using the following comment: + + ``` + /azp run python - [service] - ci + ``` \ No newline at end of file diff --git a/eng/CredScanSuppression.json b/eng/CredScanSuppression.json new file mode 100644 index 000000000000..eb3ff74bcd31 --- /dev/null +++ b/eng/CredScanSuppression.json @@ -0,0 +1,48 @@ +{ + "tool": "Credential Scanner", + "suppressions": [ + { + "placeholder": [ + "XxxxXxxxXXXxxxXXXXxxxxXXxxxXxxx", + "HusH_Sec4et", + "myPassw0rd", + "myPassw0rd3", + "SWsSsd__233$Sdsds#%Sd!", + "kt#_gahr!@aGERDXA", + "Aa1!zyx_", + "Aa!1()-xyz", + "000000000000000000000000000000000000000000000000000", + "UsernameAndPassword", + "123456", + "Password1!", + "NewPassword1!", + "MyStr0ngP4ssword", + "secret_password", + "secret", + "123", + "connectionstring", + "123_abc", + "pa$$w0rd", + "Password1!!!", + "Un53cuRE!", + "SecretBase", + "nodesdk", + "p@55wOrd", + "pass$w0rd", + "MIIJOwIBAzCCCPcGCSqGSIb3DQEHAaCCCOgEggjkMIII4DCCBgkGCSqGSIb3DQEHAaCCBfoEggX2MIIF8jCCBe4GCyqGSIb3DQEMCgECoIIE/jCCBPowHAYKKoZIhvcNAQwBAzAOBAj15YH9pOE58AICB9AEggTYLrI+SAru2dBZRQRlJY7XQ3LeLkah2FcRR3dATDshZ2h0IA2oBrkQIdsLyAAWZ32qYR1qkWxLHn9AqXgu27AEbOk35+pITZaiy63YYBkkpR+pDdngZt19Z0PWrGwHEq5z6BHS2GLyyN8SSOCbdzCz7blj3+7IZYoMj4WOPgOm/tQ6U44SFWek46QwN2zeA4i97v7ftNNns27ms52jqfhOvTA9c/wyfZKAY4aKJfYYUmycKjnnRl012ldS2lOkASFt+lu4QCa72IY6ePtRudPCvmzRv2pkLYS6z3cI7omT8nHP3DymNOqLbFqr5O2M1ZYaLC63Q3xt3eVvbcPh3N08D1hHkhz/KDTvkRAQpvrW8ISKmgDdmzN55Pe55xHfSWGB7gPw8sZea57IxFzWHTK2yvTslooWoosmGxanYY2IG/no3EbPOWDKjPZ4ilYJe5JJ2immlxPz+2e2EOCKpDI+7fzQcRz3PTd3BK+budZ8aXX8aW/lOgKS8WmxZoKnOJBNWeTNWQFugmktXfdPHAdxMhjUXqeGQd8wTvZ4EzQNNafovwkI7IV/ZYoa++RGofVR3ZbRSiBNF6TDj/qXFt0wN/CQnsGAmQAGNiN+D4mY7i25dtTu/Jc7OxLdhAUFpHyJpyrYWLfvOiS5WYBeEDHkiPUa/8eZSPA3MXWZR1RiuDvuNqMjct1SSwdXADTtF68l/US1ksU657+XSC+6ly1A/upz+X71+C4Ho6W0751j5ZMT6xKjGh5pee7MVuduxIzXjWIy3YSd0fIT3U0A5NLEvJ9rfkx6JiHjRLx6V1tqsrtT6BsGtmCQR1UCJPLqsKVDvAINx3cPA/CGqr5OX2BGZlAihGmN6n7gv8w4O0k0LPTAe5YefgXN3m9pE867N31GtHVZaJ/UVgDNYS2jused4rw76ZWN41akx2QN0JSeMJqHXqVz6AKfz8ICS/dFnEGyBNpXiMRxrY/QPKi/wONwqsbDxRW7vZRVKs78pBkE0ksaShlZk5GkeayDWC/7Hi/NqUFtIloK9XB3paLxo1DGu5qqaF34jZdktzkXp0uZqpp+FfKZaiovMjt8F7yHCPk+LYpRsU2Cyc9DVoDA6rIgf+uEP4jppgehsxyT0lJHax2t869R2jYdsXwYUXjgwHIV0voj7bJYPGFlFjXOp6ZW86scsHM5xfsGQoK2Fp838VT34SHE1ZXU/puM7rviREHYW72pfpgGZUILQMohuTPnd8tFtAkbrmjLDo+k9xx7HUvgoFTiNNWuq/cRjr70FKNguMMTIrid+HwfmbRoaxENWdLcOTNeascER2a+37UQolKD5ksrPJG6RdNA7O2pzp3micDYRs/+s28cCIxO//J/d4nsgHp6RTuCu4+Jm9k0YTw2Xg75b2cWKrxGnDUgyIlvNPaZTB5QbMid4x44/lE0LLi9kcPQhRgrK07OnnrMgZvVGjt1CLGhKUv7KFc3xV1r1rwKkosxnoG99oCoTQtregcX5rIMjHgkc1IdflGJkZzaWMkYVFOJ4Weynz008i4ddkske5vabZs37Lb8iggUYNBYZyGzalruBgnQyK4fz38Fae4nWYjyildVfgyo/fCePR2ovOfphx9OQJi+M9BoFmPrAg+8ARDZ+R+5yzYuEc9ZoVX7nkp7LTGB3DANBgkrBgEEAYI3EQIxADATBgkqhkiG9
w0BCRUxBgQEAQAAADBXBgkqhkiG9w0BCRQxSh5IAGEAOAAwAGQAZgBmADgANgAtAGUAOQA2AGUALQA0ADIAMgA0AC0AYQBhADEAMQAtAGIAZAAxADkANABkADUAYQA2AGIANwA3MF0GCSsGAQQBgjcRATFQHk4ATQBpAGMAcgBvAHMAbwBmAHQAIABTAHQAcgBvAG4AZwAgAEMAcgB5AHAAdABvAGcAcgBhAHAAaABpAGMAIABQAHIAbwB2AGkAZABlAHIwggLPBgkqhkiG9w0BBwagggLAMIICvAIBADCCArUGCSqGSIb3DQEHATAcBgoqhkiG9w0BDAEGMA4ECNX+VL2MxzzWAgIH0ICCAojmRBO+CPfVNUO0s+BVuwhOzikAGNBmQHNChmJ/pyzPbMUbx7tO63eIVSc67iERda2WCEmVwPigaVQkPaumsfp8+L6iV/BMf5RKlyRXcwh0vUdu2Qa7qadD+gFQ2kngf4Dk6vYo2/2HxayuIf6jpwe8vql4ca3ZtWXfuRix2fwgltM0bMz1g59d7x/glTfNqxNlsty0A/rWrPJjNbOPRU2XykLuc3AtlTtYsQ32Zsmu67A7UNBw6tVtkEXlFDqhavEhUEO3dvYqMY+QLxzpZhA0q44ZZ9/ex0X6QAFNK5wuWxCbupHWsgxRwKftrxyszMHsAvNoNcTlqcctee+ecNwTJQa1/MDbnhO6/qHA7cfG1qYDq8Th635vGNMW1w3sVS7l0uEvdayAsBHWTcOC2tlMa5bfHrhY8OEIqj5bN5H9RdFy8G/W239tjDu1OYjBDydiBqzBn8HG1DSj1Pjc0kd/82d4ZU0308KFTC3yGcRad0GnEH0Oi3iEJ9HbriUbfVMbXNHOF+MktWiDVqzndGMKmuJSdfTBKvGFvejAWVO5E4mgLvoaMmbchc3BO7sLeraHnJN5hvMBaLcQI38N86mUfTR8AP6AJ9c2k514KaDLclm4z6J8dMz60nUeo5D3YD09G6BavFHxSvJ8MF0Lu5zOFzEePDRFm9mH8W0N/sFlIaYfD/GWU/w44mQucjaBk95YtqOGRIj58tGDWr8iUdHwaYKGqU24zGeRae9DhFXPzZshV1ZGsBQFRaoYkyLAwdJWIXTi+c37YaC8FRSEnnNmS79Dou1Kc3BvK4EYKAD2KxjtUebrV174gD0Q+9YuJ0GXOTspBvCFd5VT2Rw5zDNrA/J3F5fMCk4wOzAfMAcGBSsOAwIaBBSxgh2xyF+88V4vAffBmZXv8Txt4AQU4O/NX4MjxSodbE7ApNAMIvrtREwCAgfQ", + ], + "_justification": "Secret used by test code, it is fake." + }, + { + "file":[ + "sdk/keyvault/azure-keyvault-certificates/tests/ca.key", + "sdk/identity/azure-identity/tests/ec-certificate.pem", + "sdk/core/azure-servicemanagement-legacy/tests/legacy_mgmt_settings_fake.py", + "sdk/storage/azure-storage-blob/tests/blob_settings_fake.py", + "sdk/storage/azure-storage-file-datalake/tests/data_lake_settings_fake.py", + "tools/azure-sdk-tools/devtools_testutils/mgmt_settings_fake.py" + ], + "_justification": "File contains private key used by test code." + } + ] +} \ No newline at end of file diff --git a/eng/ci_tools.txt b/eng/ci_tools.txt index dac85fba9d07..444dcba79cde 100644 --- a/eng/ci_tools.txt +++ b/eng/ci_tools.txt @@ -1,8 +1,9 @@ # requirements leveraged by ci tools +cryptography==3.1 setuptools==44.1.0; python_version == '2.7' setuptools==46.4.0; python_version >= '3.5' virtualenv==20.0.23 -wheel==0.34.2 +wheel==0.34.2 Jinja2==2.11.2 packaging==20.4 tox==3.15.0 @@ -17,6 +18,7 @@ coverage==4.5.4 codecov==2.1.0 beautifulsoup4==4.9.1 pkginfo==1.5.0.1 +pip==20.2 # locking packages defined as deps from azure-sdk-tools or azure-devtools pytoml==0.1.21 diff --git a/eng/common/README.md b/eng/common/README.md index 7e9e197fc466..b9867f0705e2 100644 --- a/eng/common/README.md +++ b/eng/common/README.md @@ -1,12 +1,3 @@ # Common Engineering System -The `eng/common` directory contains engineering files that are common across the various azure-sdk language repos. -It should remain relatively small and only contain textual based files like scripts, configs, or templates. It -should not contain binary files as they don't play well with git. - -# Updating - -Any updates to files in the `eng/common` directory should be made in the [azure-sdk-tools](https://github.com/azure/azure-sdk-tools) repo. -All changes made will cause a PR to created in all subscribed azure-sdk language repos which will blindly replace all contents of -the `eng/common` directory in that repo. For that reason do **NOT** make changes to files in this directory in the individual azure-sdk -languages repos as they will be overwritten the next time an update is taken from the common azure-sdk-tools repo. 
\ No newline at end of file +Updates under this directory should only be made in the `azure-sdk-tools` repo as any changes under this directory outside of that repo will end up getting overwritten with future updates. For information about making updates see [common engineering system docs](https://github.com/Azure/azure-sdk-tools/blob/master/doc/common/common_engsys.md) diff --git a/eng/common/TestResources/AzurePowerShellV4/Utility.ps1 b/eng/common/TestResources/AzurePowerShellV4/Utility.ps1 new file mode 100644 index 000000000000..0bac797fb14b --- /dev/null +++ b/eng/common/TestResources/AzurePowerShellV4/Utility.ps1 @@ -0,0 +1,153 @@ +# Copied from https://github.com/microsoft/azure-pipelines-tasks/blob/a1502bbe67561f5bec8402f32c997406f798a019/Tasks/AzurePowerShellV4/Utility.ps1 + +function Get-SavedModulePath { + [CmdletBinding()] + param([string] $azurePowerShellVersion) + $savedModulePath = $($env:SystemDrive + "\Modules\az_" + $azurePowerShellVersion) + Write-Verbose "The value of the module path is: $savedModulePath" + return $savedModulePath +} + +function Get-SavedModulePathLinux { + [CmdletBinding()] + param([string] $azurePowerShellVersion) + $savedModulePath = $("/usr/share/az_" + $azurePowerShellVersion) + Write-Verbose "The value of the module path is: $savedModulePath" + return $savedModulePath +} + +function Update-PSModulePathForHostedAgent { + [CmdletBinding()] + param([string] $targetAzurePs) + try { + if ($targetAzurePs) { + $hostedAgentAzModulePath = Get-SavedModulePath -azurePowerShellVersion $targetAzurePs + } + else { + $hostedAgentAzModulePath = Get-LatestModule -patternToMatch "^az_[0-9]+\.[0-9]+\.[0-9]+$" -patternToExtract "[0-9]+\.[0-9]+\.[0-9]+$" + } + $env:PSModulePath = $hostedAgentAzModulePath + ";" + $env:PSModulePath + $env:PSModulePath = $env:PSModulePath.TrimStart(';') + } finally { + Write-Verbose "The updated value of the PSModulePath is: $($env:PSModulePath)" + } +} + +function Update-PSModulePathForHostedAgentLinux { + [CmdletBinding()] + param([string] $targetAzurePs) + try { + if ($targetAzurePs) { + $hostedAgentAzModulePath = Get-SavedModulePathLinux -azurePowerShellVersion $targetAzurePs + if(!(Test-Path $hostedAgentAzModulePath)) { + Write-Verbose "No module path found with this name" + throw ("Could not find the module path with given version.") + } + } + else { + $hostedAgentAzModulePath = Get-LatestModuleLinux -patternToMatch "^az_[0-9]+\.[0-9]+\.[0-9]+$" -patternToExtract "[0-9]+\.[0-9]+\.[0-9]+$" + } + $env:PSModulePath = $hostedAgentAzModulePath + ":" + $env:PSModulePath + $env:PSModulePath = $env:PSModulePath.TrimStart(':') + } finally { + Write-Verbose "The updated value of the PSModulePath is: $($env:PSModulePath)" + } +} + +function Get-LatestModule { + [CmdletBinding()] + param([string] $patternToMatch, + [string] $patternToExtract) + + $resultFolder = "" + $regexToMatch = New-Object -TypeName System.Text.RegularExpressions.Regex -ArgumentList $patternToMatch + $regexToExtract = New-Object -TypeName System.Text.RegularExpressions.Regex -ArgumentList $patternToExtract + $maxVersion = [version] "0.0.0" + $modulePath = $env:SystemDrive + "\Modules"; + + try { + if (-not (Test-Path -Path $modulePath)) { + return $resultFolder + } + + $moduleFolders = Get-ChildItem -Directory -Path $modulePath | Where-Object { $regexToMatch.IsMatch($_.Name) } + foreach ($moduleFolder in $moduleFolders) { + $moduleVersion = [version] $($regexToExtract.Match($moduleFolder.Name).Groups[0].Value) + if($moduleVersion -gt $maxVersion) { + 
$modulePath = [System.IO.Path]::Combine($moduleFolder.FullName,"Az\$moduleVersion\Az.psm1") + + if(Test-Path -LiteralPath $modulePath -PathType Leaf) { + $maxVersion = $moduleVersion + $resultFolder = $moduleFolder.FullName + } else { + Write-Verbose "A folder matching the module folder pattern was found at $($moduleFolder.FullName) but didn't contain a valid module file" + } + } + } + } + catch { + Write-Verbose "Attempting to find the Latest Module Folder failed with the error: $($_.Exception.Message)" + $resultFolder = "" + } + Write-Verbose "Latest module folder detected: $resultFolder" + return $resultFolder +} + +function Get-LatestModuleLinux { + [CmdletBinding()] + param([string] $patternToMatch, + [string] $patternToExtract) + + $resultFolder = "" + $regexToMatch = New-Object -TypeName System.Text.RegularExpressions.Regex -ArgumentList $patternToMatch + $regexToExtract = New-Object -TypeName System.Text.RegularExpressions.Regex -ArgumentList $patternToExtract + $maxVersion = [version] "0.0.0" + + try { + $moduleFolders = Get-ChildItem -Directory -Path $("/usr/share") | Where-Object { $regexToMatch.IsMatch($_.Name) } + foreach ($moduleFolder in $moduleFolders) { + $moduleVersion = [version] $($regexToExtract.Match($moduleFolder.Name).Groups[0].Value) + if($moduleVersion -gt $maxVersion) { + $modulePath = [System.IO.Path]::Combine($moduleFolder.FullName,"Az/$moduleVersion/Az.psm1") + + if(Test-Path -LiteralPath $modulePath -PathType Leaf) { + $maxVersion = $moduleVersion + $resultFolder = $moduleFolder.FullName + } else { + Write-Verbose "A folder matching the module folder pattern was found at $($moduleFolder.FullName) but didn't contain a valid module file" + } + } + } + } + catch { + Write-Verbose "Attempting to find the Latest Module Folder failed with the error: $($_.Exception.Message)" + $resultFolder = "" + } + Write-Verbose "Latest module folder detected: $resultFolder" + return $resultFolder +} + +function CleanUp-PSModulePathForHostedAgent { + # Clean up PSModulePath for hosted agent + $azureRMModulePath = "C:\Modules\azurerm_2.1.0" + $azureModulePath = "C:\Modules\azure_2.1.0" + $azPSModulePath = $env:PSModulePath + + if ($azPSModulePath.split(";") -contains $azureRMModulePath) { + $azPSModulePath = (($azPSModulePath).Split(";") | ? { $_ -ne $azureRMModulePath }) -join ";" + write-verbose "$azureRMModulePath removed. Restart the prompt for the changes to take effect." + } + else { + write-verbose "$azureRMModulePath is not present in $azPSModulePath" + } + + if ($azPSModulePath.split(";") -contains $azureModulePath) { + $azPSModulePath = (($azPSModulePath).Split(";") | ? { $_ -ne $azureModulePath }) -join ";" + write-verbose "$azureModulePath removed. Restart the prompt for the changes to take effect." + } + else { + write-verbose "$azureModulePath is not present in $azPSModulePath" + } + + $env:PSModulePath = $azPSModulePath +} diff --git a/eng/common/TestResources/Import-AzModules.ps1 b/eng/common/TestResources/Import-AzModules.ps1 new file mode 100644 index 000000000000..3cd7b3124489 --- /dev/null +++ b/eng/common/TestResources/Import-AzModules.ps1 @@ -0,0 +1,11 @@ +. 
"$PSScriptRoot/AzurePowerShellV4/Utility.ps1" + +if ($IsWindows) { + # Copied from https://github.com/microsoft/azure-pipelines-tasks/blob/9cc8e1b3ee37dc023c81290de1dd522b77faccf7/Tasks/AzurePowerShellV4/AzurePowerShell.ps1#L57-L58 + CleanUp-PSModulePathForHostedAgent + Update-PSModulePathForHostedAgent +} +else { + # Copied from https://github.com/microsoft/azure-pipelines-tasks/blob/9cc8e1b3ee37dc023c81290de1dd522b77faccf7/Tasks/AzurePowerShellV4/InitializeAz.ps1#L16 + Update-PSModulePathForHostedAgentLinux +} diff --git a/eng/common/TestResources/New-TestResources.ps1 b/eng/common/TestResources/New-TestResources.ps1 index 90e496d2e83d..e1b95b2be768 100644 --- a/eng/common/TestResources/New-TestResources.ps1 +++ b/eng/common/TestResources/New-TestResources.ps1 @@ -10,15 +10,18 @@ [CmdletBinding(DefaultParameterSetName = 'Default', SupportsShouldProcess = $true, ConfirmImpact = 'Medium')] param ( - # Limit $BaseName to enough characters to be under limit plus prefixes, and https://docs.microsoft.com/azure/architecture/best-practices/resource-naming. - [Parameter(Mandatory = $true, Position = 0)] + # Limit $BaseName to enough characters to be under limit plus prefixes, and https://docs.microsoft.com/azure/architecture/best-practices/resource-naming + [Parameter()] [ValidatePattern('^[-a-zA-Z0-9\.\(\)_]{0,80}(?<=[a-zA-Z0-9\(\)])$')] [string] $BaseName, - [Parameter(Mandatory = $true)] + [ValidatePattern('^[-\w\._\(\)]+$')] + [string] $ResourceGroupName, + + [Parameter(Mandatory = $true, Position = 0)] [string] $ServiceDirectory, - [Parameter(Mandatory = $true)] + [Parameter()] [ValidatePattern('^[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$')] [string] $TestApplicationId, @@ -33,7 +36,8 @@ param ( [ValidateNotNullOrEmpty()] [string] $TenantId, - [Parameter(ParameterSetName = 'Provisioner')] + # Azure SDK Developer Playground subscription + [Parameter()] [ValidatePattern('^[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$')] [string] $SubscriptionId, @@ -45,19 +49,26 @@ param ( [string] $ProvisionerApplicationSecret, [Parameter()] - [ValidateRange(0, [int]::MaxValue)] - [int] $DeleteAfterHours, + [ValidateRange(1, [int]::MaxValue)] + [int] $DeleteAfterHours = 48, [Parameter()] [string] $Location = '', [Parameter()] - [ValidateSet('AzureCloud', 'AzureUSGovernment', 'AzureChinaCloud')] + [ValidateSet('AzureCloud', 'AzureUSGovernment', 'AzureChinaCloud', 'Dogfood')] [string] $Environment = 'AzureCloud', + [Parameter()] + [hashtable] $ArmTemplateParameters, + [Parameter()] [hashtable] $AdditionalParameters, + [Parameter()] + [ValidateNotNull()] + [hashtable] $EnvironmentVariables = @{}, + [Parameter()] [switch] $CI = ($null -ne $env:SYSTEM_TEAMPROJECTID), @@ -98,6 +109,16 @@ function Retry([scriptblock] $Action, [int] $Attempts = 5) { } } +function MergeHashes([hashtable] $source, [psvariable] $dest) { + foreach ($key in $source.Keys) { + if ($dest.Value.ContainsKey($key) -and $dest.Value[$key] -ne $source[$key]) { + Write-Warning ("Overwriting '$($dest.Name).$($key)' with value '$($dest.Value[$key])' " + + "to new value '$($source[$key])'") + } + $dest.Value[$key] = $source[$key] + } +} + # Support actions to invoke on exit. $exitActions = @({ if ($exitActions.Count -gt 1) { @@ -105,284 +126,446 @@ $exitActions = @({ } }) -trap { - # Like using try..finally in PowerShell, but without keeping track of more braces or tabbing content. 
- $exitActions.Invoke() +New-Variable -Name 'initialContext' -Value (Get-AzContext) -Option Constant +if ($initialContext) { + $exitActions += { + Write-Verbose "Restoring initial context: $($initialContext.Account)" + $null = $initialContext | Select-AzContext + } } -# Enumerate test resources to deploy. Fail if none found. -$repositoryRoot = "$PSScriptRoot/../../.." | Resolve-Path -$root = [System.IO.Path]::Combine($repositoryRoot, "sdk", $ServiceDirectory) | Resolve-Path -$templateFileName = 'test-resources.json' -$templateFiles = @() +# try..finally will also trap Ctrl+C. +try { -Write-Verbose "Checking for '$templateFileName' files under '$root'" -Get-ChildItem -Path $root -Filter $templateFileName -Recurse | ForEach-Object { - $templateFile = $_.FullName + # Enumerate test resources to deploy. Fail if none found. + $repositoryRoot = "$PSScriptRoot/../../.." | Resolve-Path + $root = [System.IO.Path]::Combine($repositoryRoot, "sdk", $ServiceDirectory) | Resolve-Path + $templateFileName = 'test-resources.json' + $templateFiles = @() - Write-Verbose "Found template '$templateFile'" - $templateFiles += $templateFile -} + Write-Verbose "Checking for '$templateFileName' files under '$root'" + Get-ChildItem -Path $root -Filter $templateFileName -Recurse | ForEach-Object { + $templateFile = $_.FullName -if (!$templateFiles) { - Write-Warning -Message "No template files found under '$root'" - exit -} + Write-Verbose "Found template '$templateFile'" + $templateFiles += $templateFile + } -# If no location is specified use safe default locations for the given -# environment. If no matching environment is found $Location remains an empty -# string. -if (!$Location) { - $Location = @{ - 'AzureCloud' = 'westus2'; - 'AzureUSGovernment' = 'usgovvirginia'; - 'AzureChinaCloud' = 'chinaeast2'; - }[$Environment] - - Write-Verbose "Location was not set. Using default location for environment: '$Location'" -} + if (!$templateFiles) { + Write-Warning -Message "No template files found under '$root'" + exit + } -# Log in if requested; otherwise, the user is expected to already be authenticated via Connect-AzAccount. -if ($ProvisionerApplicationId) { - $null = Disable-AzContextAutosave -Scope Process + $UserName = if ($env:USER) { $env:USER } else { "${env:USERNAME}" } + # Remove spaces, etc. that may be in $UserName + $UserName = $UserName -replace '\W' + + # Make sure $BaseName is set. + if ($CI) { + $BaseName = 't' + (New-Guid).ToString('n').Substring(0, 16) + Log "Generated base name '$BaseName' for CI build" + } elseif (!$BaseName) { + $BaseName = "$UserName$ServiceDirectory" + Log "BaseName was not set. Using default base name '$BaseName'" + } - Log "Logging into service principal '$ProvisionerApplicationId'" - $provisionerSecret = ConvertTo-SecureString -String $ProvisionerApplicationSecret -AsPlainText -Force - $provisionerCredential = [System.Management.Automation.PSCredential]::new($ProvisionerApplicationId, $provisionerSecret) + # Make sure pre- and post-scripts are passed formerly required arguments. + $PSBoundParameters['BaseName'] = $BaseName - # Use the given subscription ID if provided. - $subscriptionArgs = if ($SubscriptionId) { - @{SubscriptionId = $SubscriptionId} - } - else { - @{} + # Try detecting repos that support OutFile and defaulting to it + if (!$CI -and !$PSBoundParameters.ContainsKey('OutFile') -and $IsWindows) { + # TODO: find a better way to detect the language + if (Test-Path "$repositoryRoot/eng/service.proj") { + $OutFile = $true + Log "Detected .NET repository. 
Defaulting OutFile to true. Test environment settings would be stored into the file so you don't need to set environment variables manually." + } } - $provisionerAccount = Retry { - Connect-AzAccount -Force:$Force -Tenant $TenantId -Credential $provisionerCredential -ServicePrincipal -Environment $Environment @subscriptionArgs + # If no location is specified use safe default locations for the given + # environment. If no matching environment is found $Location remains an empty + # string. + if (!$Location) { + $Location = @{ + 'AzureCloud' = 'westus2'; + 'AzureUSGovernment' = 'usgovvirginia'; + 'AzureChinaCloud' = 'chinaeast2'; + 'Dogfood' = 'westus' + }[$Environment] + + Write-Verbose "Location was not set. Using default location for environment: '$Location'" } - $exitActions += { - Write-Verbose "Logging out of service principal '$($provisionerAccount.Context.Account)'" + if (!$CI) { - # Only attempt to disconnect if the -WhatIf flag was not set. Otherwise, this call is not necessary and will fail. - if ($PSCmdlet.ShouldProcess($ProvisionerApplicationId)) { - $null = Disconnect-AzAccount -AzureContext $provisionerAccount.Context + # Make sure the user is logged in to create a service principal. + $context = Get-AzContext; + if (!$context) { + Log 'User not logged in. Logging in now...' + $context = (Connect-AzAccount).Context } - } -} -# Get test application OID from ID if not already provided. -if ($TestApplicationId -and !$TestApplicationOid) { - $testServicePrincipal = Retry { - Get-AzADServicePrincipal -ApplicationId $TestApplicationId - } + $currentSubcriptionId = $context.Subscription.Id + + # If no subscription was specified, try to select the Azure SDK Developer Playground subscription. + # Ignore errors to leave the automatically selected subscription. + if ($SubscriptionId) { + if ($currentSubcriptionId -ne $SubscriptionId) { + Log "Selecting subscription '$SubscriptionId'" + $null = Select-AzSubscription -Subscription $SubscriptionId + + $exitActions += { + Log "Selecting previous subscription '$currentSubcriptionId'" + $null = Select-AzSubscription -Subscription $currentSubcriptionId + } + + # Update the context. + $context = Get-AzContext + } + } else { + if ($currentSubcriptionId -ne 'faa080af-c1d8-40ad-9cce-e1a450ca5b57') { + Log "Attempting to select subscription 'Azure SDK Developer Playground (faa080af-c1d8-40ad-9cce-e1a450ca5b57)'" + $null = Select-AzSubscription -Subscription 'faa080af-c1d8-40ad-9cce-e1a450ca5b57' -ErrorAction Ignore - if ($testServicePrincipal -and $testServicePrincipal.Id) { - $script:TestApplicationOid = $testServicePrincipal.Id + # Update the context. + $context = Get-AzContext + } + + $SubscriptionId = $context.Subscription.Id + $PSBoundParameters['SubscriptionId'] = $SubscriptionId + } + + # Use cache of well-known team subs without having to be authenticated. + $wellKnownSubscriptions = @{ + 'faa080af-c1d8-40ad-9cce-e1a450ca5b57' = 'Azure SDK Developer Playground' + 'a18897a6-7e44-457d-9260-f2854c0aca42' = 'Azure SDK Engineering System' + '2cd617ea-1866-46b1-90e3-fffb087ebf9b' = 'Azure SDK Test Resources' + } + + # Print which subscription is currently selected. + $subscriptionName = $context.Subscription.Id + if ($wellKnownSubscriptions.ContainsKey($subscriptionName)) { + $subscriptionName = '{0} ({1})' -f $wellKnownSubscriptions[$subscriptionName], $subscriptionName + } + + Log "Using subscription '$subscriptionName'" + + # Make sure the TenantId is also updated from the current context. 
+ # PSBoundParameters is not updated to avoid confusing parameter sets. + if (!$TenantId) { + $TenantId = $context.Subscription.TenantId + } + + # If no test application ID is specified during an interactive session, create a new service principal. + if (!$TestApplicationId) { + + # Cache the created service principal in this session for frequent reuse. + $servicePrincipal = if ($AzureTestPrincipal -and (Get-AzADServicePrincipal -ApplicationId $AzureTestPrincipal.ApplicationId) -and $AzureTestSubscription -eq $SubscriptionId) { + Log "TestApplicationId was not specified; loading cached service principal '$($AzureTestPrincipal.ApplicationId)'" + $AzureTestPrincipal + } else { + Log "TestApplicationId was not specified; creating a new service principal in subscription '$SubscriptionId'" + $global:AzureTestPrincipal = New-AzADServicePrincipal -Role Owner -Scope "/subscriptions/$SubscriptionId" + $global:AzureTestSubscription = $SubscriptionId + + Log "Created service principal '$($AzureTestPrincipal.ApplicationId)'" + $AzureTestPrincipal + } + + $TestApplicationId = $servicePrincipal.ApplicationId + $TestApplicationSecret = (ConvertFrom-SecureString $servicePrincipal.Secret -AsPlainText); + + # Make sure pre- and post-scripts are passed formerly required arguments. + $PSBoundParameters['TestApplicationId'] = $TestApplicationId + $PSBoundParameters['TestApplicationSecret'] = $TestApplicationSecret + } + + if (!$ProvisionerApplicationId) { + $ProvisionerApplicationId = $TestApplicationId + $ProvisionerApplicationSecret = $TestApplicationSecret + $TenantId = $context.Tenant.Id + } } -} -# If the ServiceDirectory is an absolute path use the last directory name -# (e.g. D:\foo\bar\ -> bar) -$serviceName = if (Split-Path -IsAbsolute $ServiceDirectory) { - Split-Path -Leaf $ServiceDirectory -} else { - $ServiceDirectory -} + # Log in as and run pre- and post-scripts as the provisioner service principal. + if ($ProvisionerApplicationId) { + $null = Disable-AzContextAutosave -Scope Process -# Format the resource group name based on resource group naming recommendations and limitations. -$resourceGroupName = if ($CI) { - $BaseName = 't' + (New-Guid).ToString('n').Substring(0, 16) - Write-Verbose "Generated base name '$BaseName' for CI build" + Log "Logging into service principal '$ProvisionerApplicationId'." + Write-Warning 'Logging into service principal may fail until the principal is fully propagated.' - "rg-{0}-$BaseName" -f ($serviceName -replace '[\\\/:]', '-').Substring(0, [Math]::Min($serviceName.Length, 90 - $BaseName.Length - 4)).Trim('-') -} else { - "rg-$BaseName" -} + $provisionerSecret = ConvertTo-SecureString -String $ProvisionerApplicationSecret -AsPlainText -Force + $provisionerCredential = [System.Management.Automation.PSCredential]::new($ProvisionerApplicationId, $provisionerSecret) -# Tag the resource group to be deleted after a certain number of hours if specified. -$tags = @{ - Creator = if ($env:USER) { $env:USER } else { "${env:USERNAME}" } - ServiceDirectory = $ServiceDirectory -} + # Use the given subscription ID if provided. 
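+        # $subscriptionArgs is splatted into Connect-AzAccount below (@subscriptionArgs), so
+        # -Subscription is only passed when a SubscriptionId was provided; an empty hashtable
+        # adds no extra argument.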
+ $subscriptionArgs = if ($SubscriptionId) { + @{Subscription = $SubscriptionId} + } else { + @{} + } -if ($PSBoundParameters.ContainsKey('DeleteAfterHours')) { - $deleteAfter = [DateTime]::UtcNow.AddHours($DeleteAfterHours) - $tags.Add('DeleteAfter', $deleteAfter.ToString('o')) -} + $provisionerAccount = Retry { + Connect-AzAccount -Force:$Force -Tenant $TenantId -Credential $provisionerCredential -ServicePrincipal -Environment $Environment @subscriptionArgs + } -if ($CI) { - # Add tags for the current CI job. - $tags += @{ - BuildId = "${env:BUILD_BUILDID}" - BuildJob = "${env:AGENT_JOBNAME}" - BuildNumber = "${env:BUILD_BUILDNUMBER}" - BuildReason = "${env:BUILD_REASON}" + $exitActions += { + Write-Verbose "Logging out of service principal '$($provisionerAccount.Context.Account)'" + + # Only attempt to disconnect if the -WhatIf flag was not set. Otherwise, this call is not necessary and will fail. + if ($PSCmdlet.ShouldProcess($ProvisionerApplicationId)) { + $null = Disconnect-AzAccount -AzureContext $provisionerAccount.Context + } + } } - # Set the resource group name variable. - Write-Host "Setting variable 'AZURE_RESOURCEGROUP_NAME': $resourceGroupName" - Write-Host "##vso[task.setvariable variable=AZURE_RESOURCEGROUP_NAME;]$resourceGroupName" -} + # Get test application OID from ID if not already provided. + if ($TestApplicationId -and !$TestApplicationOid) { + $testServicePrincipal = Retry { + Get-AzADServicePrincipal -ApplicationId $TestApplicationId + } -Log "Creating resource group '$resourceGroupName' in location '$Location'" -$resourceGroup = Retry { - New-AzResourceGroup -Name "$resourceGroupName" -Location $Location -Tag $tags -Force:$Force -} + if ($testServicePrincipal -and $testServicePrincipal.Id) { + $script:TestApplicationOid = $testServicePrincipal.Id + } + } -if ($resourceGroup.ProvisioningState -eq 'Succeeded') { - # New-AzResourceGroup would've written an error and stopped the pipeline by default anyway. - Write-Verbose "Successfully created resource group '$($resourceGroup.ResourceGroupName)'" -} -elseif (($resourceGroup -eq $null) -and (-not $PSCmdlet.ShouldProcess($resourceGroupName))) { - # If the -WhatIf flag was passed, there will be no resource group created. Fake it. - $resourceGroup = [PSCustomObject]@{ - ResourceGroupName = $resourceGroupName - Location = $Location + # Determine the Azure context that the script is running in. + $context = Get-AzContext; + + # If the ServiceDirectory is an absolute path use the last directory name + # (e.g. D:\foo\bar\ -> bar) + $serviceName = if (Split-Path -IsAbsolute $ServiceDirectory) { + Split-Path -Leaf $ServiceDirectory + } else { + $ServiceDirectory } -} -# Populate the template parameters and merge any additional specified. -$templateParameters = @{ - baseName = $BaseName - testApplicationId = $TestApplicationId - testApplicationOid = "$TestApplicationOid" -} + $ResourceGroupName = if ($ResourceGroupName) { + $ResourceGroupName + } elseif ($CI) { + # Format the resource group name based on resource group naming recommendations and limitations. 
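+            # Azure resource group names are limited to 90 characters, so the service name is
+            # truncated to leave room for the "rg-" prefix, the "-" separator, and $BaseName.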
+ "rg-{0}-$BaseName" -f ($serviceName -replace '[\\\/:]', '-').Substring(0, [Math]::Min($serviceName.Length, 90 - $BaseName.Length - 4)).Trim('-') + } else { + "rg-$BaseName" + } -if ($TenantId) { - $templateParameters.Add('tenantId', $TenantId) -} -if ($TestApplicationSecret) { - $templateParameters.Add('testApplicationSecret', $TestApplicationSecret) -} -if ($AdditionalParameters) { - $templateParameters += $AdditionalParameters -} + $tags = @{ + Creator = $UserName + ServiceDirectory = $ServiceDirectory + } -# Try to detect the shell based on the parent process name (e.g. launch via shebang). -$shell, $shellExportFormat = if (($parentProcessName = (Get-Process -Id $PID).Parent.ProcessName) -and $parentProcessName -eq 'cmd') { - 'cmd', 'set {0}={1}' -} elseif (@('bash', 'csh', 'tcsh', 'zsh') -contains $parentProcessName) { - 'shell', 'export {0}={1}' -} else { - 'PowerShell', '$env:{0} = ''{1}''' -} + # Tag the resource group to be deleted after a certain number of hours. + Write-Warning "Any clean-up scripts running against subscription '$SubscriptionId' may delete resource group '$ResourceGroupName' after $DeleteAfterHours hours." + $deleteAfter = [DateTime]::UtcNow.AddHours($DeleteAfterHours).ToString('o') + $tags['DeleteAfter'] = $deleteAfter + + if ($CI) { + # Add tags for the current CI job. + $tags += @{ + BuildId = "${env:BUILD_BUILDID}" + BuildJob = "${env:AGENT_JOBNAME}" + BuildNumber = "${env:BUILD_BUILDNUMBER}" + BuildReason = "${env:BUILD_REASON}" + } -foreach ($templateFile in $templateFiles) { - # Deployment fails if we pass in more parameters than are defined. - Write-Verbose "Removing unnecessary parameters from template '$templateFile'" - $templateJson = Get-Content -LiteralPath $templateFile | ConvertFrom-Json - $templateParameterNames = $templateJson.parameters.PSObject.Properties.Name - - $templateFileParameters = $templateParameters.Clone() - foreach ($key in $templateParameters.Keys) { - if ($templateParameterNames -notcontains $key) { - Write-Verbose "Removing unnecessary parameter '$key'" - $templateFileParameters.Remove($key) + # Set the resource group name variable. 
+ Write-Host "Setting variable 'AZURE_RESOURCEGROUP_NAME': $ResourceGroupName" + Write-Host "##vso[task.setvariable variable=AZURE_RESOURCEGROUP_NAME;]$ResourceGroupName" + if ($EnvironmentVariables.ContainsKey('AZURE_RESOURCEGROUP_NAME') -and ` + $EnvironmentVariables['AZURE_RESOURCEGROUP_NAME'] -ne $ResourceGroupName) + { + Write-Warning ("Overwriting 'EnvironmentVariables.AZURE_RESOURCEGROUP_NAME' with value " + + "'$($EnvironmentVariables['AZURE_RESOURCEGROUP_NAME'])' " + "to new value '$($ResourceGroupName)'") } + $EnvironmentVariables['AZURE_RESOURCEGROUP_NAME'] = $ResourceGroupName } - $preDeploymentScript = $templateFile | Split-Path | Join-Path -ChildPath 'test-resources-pre.ps1' - if (Test-Path $preDeploymentScript) { - Log "Invoking pre-deployment script '$preDeploymentScript'" - &$preDeploymentScript -ResourceGroupName $resourceGroupName @PSBoundParameters + Log "Creating resource group '$ResourceGroupName' in location '$Location'" + $resourceGroup = Retry { + New-AzResourceGroup -Name "$ResourceGroupName" -Location $Location -Tag $tags -Force:$Force } - Log "Deploying template '$templateFile' to resource group '$($resourceGroup.ResourceGroupName)'" - $deployment = Retry { - New-AzResourceGroupDeployment -Name $BaseName -ResourceGroupName $resourceGroup.ResourceGroupName -TemplateFile $templateFile -TemplateParameterObject $templateFileParameters + if ($resourceGroup.ProvisioningState -eq 'Succeeded') { + # New-AzResourceGroup would've written an error and stopped the pipeline by default anyway. + Write-Verbose "Successfully created resource group '$($resourceGroup.ResourceGroupName)'" + } + elseif (!$resourceGroup) { + if (!$PSCmdlet.ShouldProcess($resourceGroupName)) { + # If the -WhatIf flag was passed, there will be no resource group created. Fake it. + $resourceGroup = [PSCustomObject]@{ + ResourceGroupName = $resourceGroupName + Location = $Location + } + } else { + Write-Error "Resource group '$ResourceGroupName' already exists." -Category ResourceExists -RecommendedAction "Delete resource group '$ResourceGroupName', or overwrite it when redeploying." + } } - if ($deployment.ProvisioningState -eq 'Succeeded') { - # New-AzResourceGroupDeployment would've written an error and stopped the pipeline by default anyway. - Write-Verbose "Successfully deployed template '$templateFile' to resource group '$($resourceGroup.ResourceGroupName)'" + # Populate the template parameters and merge any additional specified. 
+ $templateParameters = @{ + baseName = $BaseName + testApplicationId = $TestApplicationId + testApplicationOid = "$TestApplicationOid" } - $serviceDirectoryPrefix = $serviceName.ToUpperInvariant() + "_" + if ($TenantId) { + $templateParameters.Add('tenantId', $TenantId) + } + if ($TestApplicationSecret) { + $templateParameters.Add('testApplicationSecret', $TestApplicationSecret) + } - $context = Get-AzContext; + MergeHashes $ArmTemplateParameters $(Get-Variable templateParameters) + MergeHashes $AdditionalParameters $(Get-Variable templateParameters) - # Add default values - $deploymentOutputs = @{ - "$($serviceDirectoryPrefix)CLIENT_ID" = $TestApplicationId; - "$($serviceDirectoryPrefix)CLIENT_SECRET" = $TestApplicationSecret; - "$($serviceDirectoryPrefix)TENANT_ID" = $context.Tenant.Id; - "$($serviceDirectoryPrefix)SUBSCRIPTION_ID" = $context.Subscription.Id; - "$($serviceDirectoryPrefix)RESOURCE_GROUP" = $resourceGroup.ResourceGroupName; - "$($serviceDirectoryPrefix)LOCATION" = $resourceGroup.Location; - "$($serviceDirectoryPrefix)ENVIRONMENT" = $context.Environment.Name; - "$($serviceDirectoryPrefix)AZURE_AUTHORITY_HOST" = $context.Environment.ActiveDirectoryAuthority; - "$($serviceDirectoryPrefix)RESOURCE_MANAGER_URL" = $context.Environment.ResourceManagerUrl; - "$($serviceDirectoryPrefix)SERVICE_MANAGEMENT_URL" = $context.Environment.ServiceManagementUrl; + # Include environment-specific parameters only if not already provided as part of the "ArmTemplateParameters" + if (($context.Environment.StorageEndpointSuffix) -and (-not ($templateParameters.ContainsKey('storageEndpointSuffix')))) { + $templateParameters.Add('storageEndpointSuffix', $context.Environment.StorageEndpointSuffix) } - foreach ($key in $deployment.Outputs.Keys) { - $variable = $deployment.Outputs[$key] + # Try to detect the shell based on the parent process name (e.g. launch via shebang). + $shell, $shellExportFormat = if (($parentProcessName = (Get-Process -Id $PID).Parent.ProcessName) -and $parentProcessName -eq 'cmd') { + 'cmd', 'set {0}={1}' + } elseif (@('bash', 'csh', 'tcsh', 'zsh') -contains $parentProcessName) { + 'shell', 'export {0}={1}' + } else { + 'PowerShell', '${{env:{0}}} = ''{1}''' + } - # Work around bug that makes the first few characters of environment variables be lowercase. - $key = $key.ToUpperInvariant() + # Deploy the templates + foreach ($templateFile in $templateFiles) { + # Deployment fails if we pass in more parameters than are defined. 
+ Write-Verbose "Removing unnecessary parameters from template '$templateFile'" + $templateJson = Get-Content -LiteralPath $templateFile | ConvertFrom-Json + $templateParameterNames = $templateJson.parameters.PSObject.Properties.Name + + $templateFileParameters = $templateParameters.Clone() + foreach ($key in $templateParameters.Keys) { + if ($templateParameterNames -notcontains $key) { + Write-Verbose "Removing unnecessary parameter '$key'" + $templateFileParameters.Remove($key) + } + } - if ($variable.Type -eq 'String' -or $variable.Type -eq 'SecureString') { - $deploymentOutputs[$key] = $variable.Value + $preDeploymentScript = $templateFile | Split-Path | Join-Path -ChildPath 'test-resources-pre.ps1' + if (Test-Path $preDeploymentScript) { + Log "Invoking pre-deployment script '$preDeploymentScript'" + &$preDeploymentScript -ResourceGroupName $ResourceGroupName @PSBoundParameters } - } - if ($OutFile) - { - if (!$IsWindows) - { - Write-Host "File option is supported only on Windows" + Log "Deploying template '$templateFile' to resource group '$($resourceGroup.ResourceGroupName)'" + $deployment = Retry { + $lastDebugPreference = $DebugPreference + try { + if ($CI) { + $DebugPreference = 'Continue' + } + New-AzResourceGroupDeployment -Name $BaseName -ResourceGroupName $resourceGroup.ResourceGroupName -TemplateFile $templateFile -TemplateParameterObject $templateFileParameters -Mode Complete -Force:$Force + } catch { + Write-Output @' +##################################################### +# For help debugging live test provisioning issues, # +# see http://aka.ms/azsdk/engsys/live-test-help, # +##################################################### +'@ + throw + } finally { + $DebugPreference = $lastDebugPreference + } } - $outputFile = "$templateFile.env" + if ($deployment.ProvisioningState -eq 'Succeeded') { + # New-AzResourceGroupDeployment would've written an error and stopped the pipeline by default anyway. 
+ Write-Verbose "Successfully deployed template '$templateFile' to resource group '$($resourceGroup.ResourceGroupName)'" + } - $environmentText = $deploymentOutputs | ConvertTo-Json; - $bytes = ([System.Text.Encoding]::UTF8).GetBytes($environmentText) - $protectedBytes = [Security.Cryptography.ProtectedData]::Protect($bytes, $null, [Security.Cryptography.DataProtectionScope]::CurrentUser) + $serviceDirectoryPrefix = $serviceName.ToUpperInvariant() + "_" + + # Add default values + $deploymentOutputs = @{ + "$($serviceDirectoryPrefix)CLIENT_ID" = $TestApplicationId; + "$($serviceDirectoryPrefix)CLIENT_SECRET" = $TestApplicationSecret; + "$($serviceDirectoryPrefix)TENANT_ID" = $context.Tenant.Id; + "$($serviceDirectoryPrefix)SUBSCRIPTION_ID" = $context.Subscription.Id; + "$($serviceDirectoryPrefix)RESOURCE_GROUP" = $resourceGroup.ResourceGroupName; + "$($serviceDirectoryPrefix)LOCATION" = $resourceGroup.Location; + "$($serviceDirectoryPrefix)ENVIRONMENT" = $context.Environment.Name; + "$($serviceDirectoryPrefix)AZURE_AUTHORITY_HOST" = $context.Environment.ActiveDirectoryAuthority; + "$($serviceDirectoryPrefix)RESOURCE_MANAGER_URL" = $context.Environment.ResourceManagerUrl; + "$($serviceDirectoryPrefix)SERVICE_MANAGEMENT_URL" = $context.Environment.ServiceManagementUrl; + "$($serviceDirectoryPrefix)STORAGE_ENDPOINT_SUFFIX" = $context.Environment.StorageEndpointSuffix; + } - Set-Content $outputFile -Value $protectedBytes -AsByteStream -Force + MergeHashes $EnvironmentVariables $(Get-Variable deploymentOutputs) - Write-Host "Test environment settings`n $environmentText`nstored into encrypted $outputFile" - } - else - { + foreach ($key in $deployment.Outputs.Keys) { + $variable = $deployment.Outputs[$key] - if (!$CI) { - # Write an extra new line to isolate the environment variables for easy reading. - Log "Persist the following environment variables based on your detected shell ($shell):`n" + # Work around bug that makes the first few characters of environment variables be lowercase. + $key = $key.ToUpperInvariant() + + if ($variable.Type -eq 'String' -or $variable.Type -eq 'SecureString') { + $deploymentOutputs[$key] = $variable.Value + } } - foreach ($key in $deploymentOutputs.Keys) - { - $value = $deploymentOutputs[$key] - - if ($CI) { - # Treat all ARM template output variables as secrets since "SecureString" variables do not set values. - # In order to mask secrets but set environment variables for any given ARM template, we set variables twice as shown below. - Write-Host "Setting variable '$key': ***" - Write-Host "##vso[task.setvariable variable=_$key;issecret=true;]$($value)" - Write-Host "##vso[task.setvariable variable=$key;]$($value)" - } else { - Write-Host ($shellExportFormat -f $key, $value) + if ($OutFile) { + if (!$IsWindows) { + Write-Host 'File option is supported only on Windows' + } + + $outputFile = "$templateFile.env" + + $environmentText = $deploymentOutputs | ConvertTo-Json; + $bytes = ([System.Text.Encoding]::UTF8).GetBytes($environmentText) + $protectedBytes = [Security.Cryptography.ProtectedData]::Protect($bytes, $null, [Security.Cryptography.DataProtectionScope]::CurrentUser) + + Set-Content $outputFile -Value $protectedBytes -AsByteStream -Force + + Write-Host "Test environment settings`n $environmentText`nstored into encrypted $outputFile" + } else { + + if (!$CI) { + # Write an extra new line to isolate the environment variables for easy reading. 
+ Log "Persist the following environment variables based on your detected shell ($shell):`n" + } + + foreach ($key in $deploymentOutputs.Keys) { + $value = $deploymentOutputs[$key] + $EnvironmentVariables[$key] = $value + + if ($CI) { + # Treat all ARM template output variables as secrets since "SecureString" variables do not set values. + # In order to mask secrets but set environment variables for any given ARM template, we set variables twice as shown below. + Write-Host "Setting variable '$key': ***" + Write-Host "##vso[task.setvariable variable=_$key;issecret=true;]$($value)" + Write-Host "##vso[task.setvariable variable=$key;]$($value)" + } else { + Write-Host ($shellExportFormat -f $key, $value) + } + } + + if ($key) { + # Isolate the environment variables for easy reading. + Write-Host "`n" + $key = $null } } - if ($key) { - # Isolate the environment variables for easy reading. - Write-Host "`n" - $key = $null + $postDeploymentScript = $templateFile | Split-Path | Join-Path -ChildPath 'test-resources-post.ps1' + if (Test-Path $postDeploymentScript) { + Log "Invoking post-deployment script '$postDeploymentScript'" + &$postDeploymentScript -ResourceGroupName $ResourceGroupName -DeploymentOutputs $deploymentOutputs @PSBoundParameters } } - $postDeploymentScript = $templateFile | Split-Path | Join-Path -ChildPath 'test-resources-post.ps1' - if (Test-Path $postDeploymentScript) { - Log "Invoking post-deployment script '$postDeploymentScript'" - &$postDeploymentScript -ResourceGroupName $resourceGroupName -DeploymentOutputs $deploymentOutputs @PSBoundParameters - } +} finally { + $exitActions.Invoke() } -$exitActions.Invoke() +# Suppress output locally +if ($CI) { + return $EnvironmentVariables +} <# .SYNOPSIS @@ -414,6 +597,10 @@ the ARM template. See also https://docs.microsoft.com/azure/architecture/best-pr Note: The value specified for this parameter will be overriden and generated by New-TestResources.ps1 if $CI is specified. +.PARAMETER ResourceGroupName +Set this value to deploy directly to a Resource Group that has already been +created. + .PARAMETER ServiceDirectory A directory under 'sdk' in the repository root - optionally with subdirectories specified - in which to discover ARM templates named 'test-resources.json'. @@ -441,7 +628,7 @@ test resources (e.g. Role Assignments on resources). It is passed as to the ARM template as 'testApplicationOid' For more information on the relationship between AAD Applications and Service -Principals see: https://docs.microsoft.com/en-us/azure/active-directory/develop/app-objects-and-service-principals +Principals see: https://docs.microsoft.com/azure/active-directory/develop/app-objects-and-service-principals .PARAMETER TenantId The tenant ID of a service principal when a provisioner is specified. The same @@ -452,6 +639,12 @@ is passed to the ARM template as 'tenantId'. Optional subscription ID to use for new resources when logging in as a provisioner. You can also use Set-AzContext if not provisioning. +If you do not specify a SubscriptionId and are not logged in, one will be +automatically selected for you by the Connect-AzAccount cmdlet. + +Once you are logged in (or were previously), the selected SubscriptionId +will be used for subsequent operations that are specific to a subscription. + .PARAMETER ProvisionerApplicationId The AAD Application ID used to provision test resources when a provisioner is specified. @@ -469,17 +662,14 @@ If none is specified New-TestResources.ps1 uses the TestApplicationSecret. 
This value is not passed to the ARM template. .PARAMETER DeleteAfterHours -Optional. Positive integer number of hours from the current time to set the +Positive integer number of hours from the current time to set the 'DeleteAfter' tag on the created resource group. The computed value is a timestamp of the form "2020-03-04T09:07:04.3083910Z". -If this value is not specified no 'DeleteAfter' tag will be assigned to the -created resource group. - An optional cleanup process can delete resource groups whose "DeleteAfter" timestamp is less than the current time. -This isused for CI automation. +This is used for CI automation. .PARAMETER Location Optional location where resources should be created. If left empty, the default @@ -488,14 +678,21 @@ is based on the cloud to which the template is being deployed: * AzureCloud -> 'westus2' * AzureUSGovernment -> 'usgovvirginia' * AzureChinaCloud -> 'chinaeast2' +* Dogfood -> 'westus' .PARAMETER Environment Name of the cloud environment. The default is the Azure Public Cloud -('PublicCloud') +('AzureCloud') .PARAMETER AdditionalParameters +Optional key-value pairs of parameters to pass to the ARM template(s) and pre-post scripts. + +.PARAMETER ArmTemplateParameters Optional key-value pairs of parameters to pass to the ARM template(s). +.PARAMETER EnvironmentVariables +Optional key-value pairs of parameters to set as environment variables to the shell. + .PARAMETER CI Indicates the script is run as part of a Continuous Integration / Continuous Deployment (CI/CD) build (only Azure Pipelines is currently supported). @@ -508,13 +705,8 @@ Save test environment settings into a test-resources.json.env file next to test- The environment file would be scoped to the current repository directory. .EXAMPLE -Connect-AzAccount -Subscription "REPLACE_WITH_SUBSCRIPTION_ID" -$testAadApp = New-AzADServicePrincipal -Role Owner -DisplayName 'azure-sdk-live-test-app' -New-TestResources.ps1 ` - -BaseName 'uuid123' ` - -ServiceDirectory 'keyvault' ` - -TestApplicationId $testAadApp.ApplicationId.ToString() ` - -TestApplicationSecret (ConvertFrom-SecureString $testAadApp.Secret -AsPlainText) +Connect-AzAccount -Subscription 'REPLACE_WITH_SUBSCRIPTION_ID' +New-TestResources.ps1 keyvault Run this in a desktop environment to create new AAD apps and Service Principals that can be used to provision resources and run live tests. @@ -540,6 +732,4 @@ Run this in an Azure DevOps CI (with approrpiate variables configured) before executing live tests. The script will output variables as secrets (to enable log redaction). -.LINK -Remove-TestResources.ps1 #> diff --git a/eng/common/TestResources/New-TestResources.ps1.md b/eng/common/TestResources/New-TestResources.ps1.md index c9a462aae3a8..75c4676102e7 100644 --- a/eng/common/TestResources/New-TestResources.ps1.md +++ b/eng/common/TestResources/New-TestResources.ps1.md @@ -14,19 +14,21 @@ Deploys live test resources defined for a service directory to Azure. 
### Default (Default) ``` -New-TestResources.ps1 [-BaseName] -ServiceDirectory -TestApplicationId - [-TestApplicationSecret ] [-TestApplicationOid ] [-DeleteAfterHours ] - [-Location ] [-Environment ] [-AdditionalParameters ] [-CI] [-Force] [-WhatIf] - [-Confirm] [] +New-TestResources.ps1 [-BaseName ] [-ResourceGroupName ] [-ServiceDirectory] + [-TestApplicationId ] [-TestApplicationSecret ] [-TestApplicationOid ] + [-SubscriptionId ] [-DeleteAfterHours ] [-Location ] [-Environment ] + [-ArmTemplateParameters ] [-AdditionalParameters ] [-EnvironmentVariables ] + [-CI] [-Force] [-OutFile] [-WhatIf] [-Confirm] [] ``` ### Provisioner ``` -New-TestResources.ps1 [-BaseName] -ServiceDirectory -TestApplicationId - [-TestApplicationSecret ] [-TestApplicationOid ] -TenantId [-SubscriptionId ] - -ProvisionerApplicationId -ProvisionerApplicationSecret [-DeleteAfterHours ] - [-Location ] [-Environment ] [-AdditionalParameters ] [-CI] [-Force] [-WhatIf] - [-Confirm] [] +New-TestResources.ps1 [-BaseName ] [-ResourceGroupName ] [-ServiceDirectory] + [-TestApplicationId ] [-TestApplicationSecret ] [-TestApplicationOid ] + -TenantId [-SubscriptionId ] -ProvisionerApplicationId + -ProvisionerApplicationSecret [-DeleteAfterHours ] [-Location ] + [-Environment ] [-ArmTemplateParameters ] [-AdditionalParameters ] + [-EnvironmentVariables ] [-CI] [-Force] [-OutFile] [-WhatIf] [-Confirm] [] ``` ## DESCRIPTION @@ -53,13 +55,8 @@ specified in $ProvisionerApplicationId and $ProvisionerApplicationSecret. ### EXAMPLE 1 ``` -Connect-AzAccount -Subscription "REPLACE_WITH_SUBSCRIPTION_ID" -$testAadApp = New-AzADServicePrincipal -Role Owner -DisplayName 'azure-sdk-live-test-app' -New-TestResources.ps1 ` - -BaseName 'uuid123' ` - -ServiceDirectory 'keyvault' ` - -TestApplicationId $testAadApp.ApplicationId.ToString() ` - -TestApplicationSecret (ConvertFrom-SecureString $testAadApp.Secret -AsPlainText) +Connect-AzAccount -Subscription 'REPLACE_WITH_SUBSCRIPTION_ID' +New-TestResources.ps1 keyvault ``` Run this in a desktop environment to create new AAD apps and Service Principals @@ -105,8 +102,24 @@ Type: String Parameter Sets: (All) Aliases: -Required: True -Position: 1 +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +Set this value to deploy directly to a Resource Group that has already been +created. + +```yaml +Type: String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named Default value: None Accept pipeline input: False Accept wildcard characters: False @@ -123,7 +136,7 @@ Parameter Sets: (All) Aliases: Required: True -Position: Named +Position: 1 Default value: None Accept pipeline input: False Accept wildcard characters: False @@ -142,7 +155,7 @@ Type: String Parameter Sets: (All) Aliases: -Required: True +Required: False Position: Named Default value: None Accept pipeline input: False @@ -180,7 +193,7 @@ It is passed as to the ARM template as 'testApplicationOid' For more information on the relationship between AAD Applications and Service -Principals see: https://docs.microsoft.com/en-us/azure/active-directory/develop/app-objects-and-service-principals +Principals see: https://docs.microsoft.com/azure/active-directory/develop/app-objects-and-service-principals ```yaml Type: String @@ -218,9 +231,15 @@ Optional subscription ID to use for new resources when logging in as a provisioner. You can also use Set-AzContext if not provisioning. 
+If you do not specify a SubscriptionId and are not logged in, one will be +automatically selected for you by the Connect-AzAccount cmdlet. + +Once you are logged in (or were previously), the selected SubscriptionId +will be used for subsequent operations that are specific to a subscription. + ```yaml Type: String -Parameter Sets: Provisioner +Parameter Sets: (All) Aliases: Required: False @@ -271,19 +290,15 @@ Accept wildcard characters: False ``` ### -DeleteAfterHours -Optional. Positive integer number of hours from the current time to set the 'DeleteAfter' tag on the created resource group. The computed value is a timestamp of the form "2020-03-04T09:07:04.3083910Z". -If this value is not specified no 'DeleteAfter' tag will be assigned to the -created resource group. - An optional cleanup process can delete resource groups whose "DeleteAfter" timestamp is less than the current time. -This isused for CI automation. +This is used for CI automation. ```yaml Type: Int32 @@ -292,7 +307,7 @@ Aliases: Required: False Position: Named -Default value: 0 +Default value: 48 Accept pipeline input: False Accept wildcard characters: False ``` @@ -305,6 +320,7 @@ is based on the cloud to which the template is being deployed: * AzureCloud -\> 'westus2' * AzureUSGovernment -\> 'usgovvirginia' * AzureChinaCloud -\> 'chinaeast2' +* Dogfood -\> 'westus' ```yaml Type: String @@ -321,7 +337,7 @@ Accept wildcard characters: False ### -Environment Name of the cloud environment. The default is the Azure Public Cloud -('PublicCloud') +('AzureCloud') ```yaml Type: String @@ -335,7 +351,7 @@ Accept pipeline input: False Accept wildcard characters: False ``` -### -AdditionalParameters +### -ArmTemplateParameters Optional key-value pairs of parameters to pass to the ARM template(s). ```yaml @@ -350,6 +366,36 @@ Accept pipeline input: False Accept wildcard characters: False ``` +### -AdditionalParameters +Optional key-value pairs of parameters to pass to the ARM template(s) and pre-post scripts. + +```yaml +Type: Hashtable +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EnvironmentVariables +Optional key-value pairs of parameters to set as environment variables to the shell. + +```yaml +Type: Hashtable +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: @{} +Accept pipeline input: False +Accept wildcard characters: False +``` + ### -CI Indicates the script is run as part of a Continuous Integration / Continuous Deployment (CI/CD) build (only Azure Pipelines is currently supported). @@ -381,29 +427,32 @@ Accept pipeline input: False Accept wildcard characters: False ``` -### -WhatIf -Shows what would happen if the cmdlet runs. -The cmdlet is not run. +### -OutFile +Save test environment settings into a test-resources.json.env file next to test-resources.json. +File is protected via DPAPI. +Supported only on windows. +The environment file would be scoped to the current repository directory. ```yaml Type: SwitchParameter Parameter Sets: (All) -Aliases: wi +Aliases: Required: False Position: Named -Default value: None +Default value: False Accept pipeline input: False Accept wildcard characters: False ``` -### -Confirm -Prompts you for confirmation before running the cmdlet. +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. 
```yaml Type: SwitchParameter Parameter Sets: (All) -Aliases: cf +Aliases: wi Required: False Position: Named @@ -412,15 +461,13 @@ Accept pipeline input: False Accept wildcard characters: False ``` -### -OutFile -save test environment settings into a test-resources.json.env file next to test-resources.json. -The file is protected via DPAPI. The environment file would be scoped to the current repository directory. -Note: Supported only on Windows. +### -Confirm +Prompts you for confirmation before running the cmdlet. ```yaml Type: SwitchParameter Parameter Sets: (All) -Aliases: +Aliases: cf Required: False Position: Named @@ -430,7 +477,7 @@ Accept wildcard characters: False ``` ### CommonParameters -This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](https://go.microsoft.com/fwlink/?LinkID=113216). ## INPUTS @@ -439,5 +486,3 @@ This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable ## NOTES ## RELATED LINKS - -[Remove-TestResources.ps1](./Remove-TestResources.ps1.md) diff --git a/eng/common/TestResources/README.md b/eng/common/TestResources/README.md index 3c0cd7c2207e..59779f9f9898 100644 --- a/eng/common/TestResources/README.md +++ b/eng/common/TestResources/README.md @@ -19,40 +19,37 @@ scenarios as well as on hosted agents for continuous integration testing. ## On the Desktop To set up your Azure account to run live tests, you'll need to log into Azure, -create a service principal, and set up your resources defined in -test-resources.json as shown in the following example using Azure Search. +and set up your resources defined in test-resources.json as shown in the following +example using Azure Key Vault. The script will create a service principal automatically, +or you may create a service principal you can save and reuse subsequently. Note that `-Subscription` is an optional parameter but recommended if your account -is a member of multiple subscriptions. +is a member of multiple subscriptions. If you didn't specify it when logging in, +you should select your desired subscription using `Select-AzSubscription`. The +default can be saved using `Set-AzDefault` for future sessions. ```powershell Connect-AzAccount -Subscription 'YOUR SUBSCRIPTION ID' -$sp = New-AzADServicePrincipal -Role Owner -eng\common\TestResources\New-TestResources.ps1 ` - -BaseName 'myusername' ` - -ServiceDirectory 'search' ` - -TestApplicationId $sp.ApplicationId ` - -TestApplicationSecret (ConvertFrom-SecureString $sp.Secret -AsPlainText) +eng\common\TestResources\New-TestResources.ps1 keyvault ``` -If you are running this for a .NET project on Windows, the recommended method is to -add the `-OutFile` switch to the above command. This will save test environment settings -into a test-resources.json.env file next to test-resources.json. The file is protected via DPAPI. -The environment file would be scoped to the current repository directory and avoids the need to +The `OutFile` switch will be set by default if you are running this for a .NET project on Windows. 
This will save test environment settings +into a test-resources.json.env file next to test-resources.json. The file is protected via DPAPI. +The environment file would be scoped to the current repository directory and avoids the need to set environment variables or restart your IDE to recognize them. Along with some log messages, this will output environment variables based on your current shell like in the following example: ```powershell -$env:AZURE_TENANT_ID = '<>' -$env:AZURE_CLIENT_ID = '<>' -$env:AZURE_CLIENT_SECRET = '<>' -$env:AZURE_SUBSCRIPTION_ID = 'YOUR SUBSCRIPTION ID' -$env:AZURE_RESOURCE_GROUP = 'rg-myusername' -$env:AZURE_LOCATION = 'westus2' -$env:AZURE_SEARCH_STORAGE_NAME = 'myusernamestg' -$env:AZURE_SEARCH_STORAGE_KEY = '<>' +${env:KEYVAULT_TENANT_ID} = '<>' +${env:KEYVAULT_CLIENT_ID} = '<>' +${env:KEYVAULT_CLIENT_SECRET} = '<>' +${env:KEYVAULT_SUBSCRIPTION_ID} = 'YOUR SUBSCRIPTION ID' +${env:KEYVAULT_RESOURCE_GROUP} = 'rg-myusername' +${env:KEYVAULT_LOCATION} = 'westus2' +${env:KEYVAULT_SKU} = 'premium' +${env:AZURE_KEYVAULT_URL} = '<>' ``` For security reasons we do not set these environment variables automatically @@ -68,33 +65,47 @@ applications started outside the terminal, you could copy and paste the following commands: ```powershell -setx AZURE_TENANT_ID $env:AZURE_TENANT_ID -setx AZURE_CLIENT_ID $env:AZURE_CLIENT_ID -setx AZURE_CLIENT_SECRET $env:AZURE_CLIENT_SECRET -setx AZURE_SUBSCRIPTION_ID $env:AZURE_SUBSCRIPTION_ID -setx AZURE_RESOURCE_GROUP $env:AZURE_RESOURCE_GROUP -setx AZURE_LOCATION $env:AZURE_LOCATION -setx AZURE_SEARCH_STORAGE_NAME $env:AZURE_SEARCH_STORAGE_NAME -setx AZURE_SEARCH_STORAGE_KEY $env:AZURE_SEARCH_STORAGE_KEY +setx KEYVAULT_TENANT_ID ${env:KEYVAULT_TENANT_ID} +setx KEYVAULT_CLIENT_ID ${env:KEYVAULT_CLIENT_ID} +setx KEYVAULT_CLIENT_SECRET ${env:KEYVAULT_CLIENT_SECRET} +setx KEYVAULT_SUBSCRIPTION_ID ${env:KEYVAULT_SUBSCRIPTION_ID} +setx KEYVAULT_RESOURCE_GROUP ${env:KEYVAULT_RESOURCE_GROUP} +setx KEYVAULT_LOCATION ${env:KEYVAULT_LOCATION} +setx KEYVAULT_SKU ${env:KEYVAULT_SKU} +setx AZURE_KEYVAULT_URL ${env:AZURE_KEYVAULT_URL} ``` -After running or recording live tests, if you do not plan on further testing -you can remove the test resources you created above by running: -[Remove-TestResources.ps1][]: +### Cleaning up Resources + +By default, resource groups are tagged with a `DeleteAfter` value and date according to the default or specified +value for the `-DeleteAfterHours` switch. You can use this tag in scheduled jobs to remove older resources based +on that date. + +If you are not ready for the resources to be deleted, you can update the resource group by running [Update-TestResources.ps1][]: ```powershell -Remove-TestResources.ps1 -BaseName 'myusername' -Force +Update-TestResources.ps1 keyvault ``` -If you created a new service principal as shown above, you might also remove it: +This will extend the expiration time by the default value (e.g. 48 hours) from now. -```powershell -Remove-AzADServicePrincipal -ApplicationId $sp.ApplicationId -Force +Alternatively, after running or recording live tests, if you do not plan on further testing +you can immediately remove the test resources you created above by running [Remove-TestResources.ps1][]: +```powershell +Remove-TestResources.ps1 keyvault -Force ``` If you persisted environment variables, you should also remove those as well. 
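One way to do that, sketched below, is to clear both the persisted (user-level) copies written by `setx` and the copies in the current session. The variable names follow the Key Vault example above and are only illustrative; substitute whatever names your service's template actually outputs.

```powershell
# Sketch: remove the persisted (setx) values and the current session's copies.
# Variable names follow the Key Vault example above; adjust for your service.
'KEYVAULT_TENANT_ID', 'KEYVAULT_CLIENT_ID', 'KEYVAULT_CLIENT_SECRET',
'KEYVAULT_SUBSCRIPTION_ID', 'KEYVAULT_RESOURCE_GROUP', 'KEYVAULT_LOCATION',
'KEYVAULT_SKU', 'AZURE_KEYVAULT_URL' | ForEach-Object {
    [Environment]::SetEnvironmentVariable($_, $null, 'User')   # clears the persisted value (Windows)
    Remove-Item -Path "Env:$_" -ErrorAction SilentlyContinue   # clears it from this session
}
```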
+### Passing Additional Arguments + +Some test-resources.json templates utilize the `AdditionalParameters` parameter to control additional resource configuration options. For example: + +```powershell +New-TestResources.ps1 keyvault -AdditionalParameters @{enableHsm = $true} +``` + ## In CI Test pipelines should include deploy-test-resources.yml and @@ -123,10 +134,13 @@ Install-Module platyPS -Scope CurrentUser -Force New-MarkdownHelp -Command .\New-TestResources.ps1 -OutputFolder . -Force ``` +After the markdown files are generated, please make sure all "http" URIs use "https". + PowerShell markdown documentation created with [platyPS][]. - [New-TestResources.ps1]: ./New-TestResources.ps1.md - [Remove-TestResources.ps1]: ./Remove-TestResources.ps1.md + [New-TestResources.ps1]: https://aka.ms/azsdk/tools/New-TestResources + [Update-TestResources.ps1]: https://aka.ms/azsdk/tools/Update-TestResources + [Remove-TestResources.ps1]: https://aka.ms/azsdk/tools/Remove-TestResources [PowerShell]: https://github.com/PowerShell/PowerShell [PowerShellAz]: https://docs.microsoft.com/powershell/azure/install-az-ps [platyPS]: https://github.com/PowerShell/platyPS diff --git a/eng/common/TestResources/Remove-TestResources.ps1 b/eng/common/TestResources/Remove-TestResources.ps1 index 0c5c464bc870..ebb8b021ccf9 100644 --- a/eng/common/TestResources/Remove-TestResources.ps1 +++ b/eng/common/TestResources/Remove-TestResources.ps1 @@ -11,7 +11,7 @@ [CmdletBinding(DefaultParameterSetName = 'Default', SupportsShouldProcess = $true, ConfirmImpact = 'Medium')] param ( # Limit $BaseName to enough characters to be under limit plus prefixes, and https://docs.microsoft.com/azure/architecture/best-practices/resource-naming. - [Parameter(ParameterSetName = 'Default', Mandatory = $true, Position = 0)] + [Parameter(ParameterSetName = 'Default')] [Parameter(ParameterSetName = 'Default+Provisioner', Mandatory = $true, Position = 0)] [ValidatePattern('^[-a-zA-Z0-9\.\(\)_]{0,80}(?<=[a-zA-Z0-9\(\)])$')] [string] $BaseName, @@ -25,8 +25,7 @@ param ( [ValidateNotNullOrEmpty()] [string] $TenantId, - [Parameter(ParameterSetName = 'Default+Provisioner')] - [Parameter(ParameterSetName = 'ResourceGroup+Provisioner')] + [Parameter()] [ValidatePattern('^[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$')] [string] $SubscriptionId, @@ -39,11 +38,14 @@ param ( [Parameter(ParameterSetName = 'ResourceGroup+Provisioner', Mandatory = $true)] [string] $ProvisionerApplicationSecret, - [Parameter()] + [Parameter(ParameterSetName = 'Default', Mandatory = $true, Position = 0)] + [Parameter(ParameterSetName = 'Default+Provisioner')] + [Parameter(ParameterSetName = 'ResourceGroup')] + [Parameter(ParameterSetName = 'ResourceGroup+Provisioner')] [string] $ServiceDirectory, [Parameter()] - [ValidateSet('AzureCloud', 'AzureUSGovernment', 'AzureChinaCloud')] + [ValidateSet('AzureCloud', 'AzureUSGovernment', 'AzureChinaCloud', 'Dogfood')] [string] $Environment = 'AzureCloud', [Parameter()] @@ -118,12 +120,67 @@ if ($ProvisionerApplicationId) { } } +$context = Get-AzContext + +# Make sure $BaseName is set. +if (!$BaseName) { + + $UserName = if ($env:USER) { $env:USER } else { "${env:USERNAME}" } + # Remove spaces, etc. that may be in $UserName + $UserName = $UserName -replace '\W' + + $BaseName = "$UserName$ServiceDirectory" + Log "BaseName was not set. Using default base name '$BaseName'" +} + if (!$ResourceGroupName) { # Format the resource group name like in New-TestResources.ps1. 
$ResourceGroupName = "rg-$BaseName" } -if (![string]::IsNullOrWhiteSpace($ServiceDirectory)) { +# If no subscription was specified, try to select the Azure SDK Developer Playground subscription. +# Ignore errors to leave the automatically selected subscription. +if ($SubscriptionId) { + $currentSubcriptionId = $context.Subscription.Id + if ($currentSubcriptionId -ne $SubscriptionId) { + Log "Selecting subscription '$SubscriptionId'" + $null = Select-AzSubscription -Subscription $SubscriptionId + + $exitActions += { + Log "Selecting previous subscription '$currentSubcriptionId'" + $null = Select-AzSubscription -Subscription $currentSubcriptionId + } + + # Update the context. + $context = Get-AzContext + } +} else { + Log "Attempting to select subscription 'Azure SDK Developer Playground (faa080af-c1d8-40ad-9cce-e1a450ca5b57)'" + $null = Select-AzSubscription -Subscription 'faa080af-c1d8-40ad-9cce-e1a450ca5b57' -ErrorAction Ignore + + # Update the context. + $context = Get-AzContext + + $SubscriptionId = $context.Subscription.Id + $PSBoundParameters['SubscriptionId'] = $SubscriptionId +} + +# Use cache of well-known team subs without having to be authenticated. +$wellKnownSubscriptions = @{ + 'faa080af-c1d8-40ad-9cce-e1a450ca5b57' = 'Azure SDK Developer Playground' + 'a18897a6-7e44-457d-9260-f2854c0aca42' = 'Azure SDK Engineering System' + '2cd617ea-1866-46b1-90e3-fffb087ebf9b' = 'Azure SDK Test Resources' +} + +# Print which subscription is currently selected. +$subscriptionName = $context.Subscription.Id +if ($wellKnownSubscriptions.ContainsKey($subscriptionName)) { + $subscriptionName = '{0} ({1})' -f $wellKnownSubscriptions[$subscriptionName], $subscriptionName +} + +Log "Selected subscription '$subscriptionName'" + +if ($ServiceDirectory) { $root = [System.IO.Path]::Combine("$PSScriptRoot/../../../sdk", $ServiceDirectory) | Resolve-Path $preRemovalScript = Join-Path -Path $root -ChildPath 'remove-test-resources-pre.ps1' if (Test-Path $preRemovalScript) { @@ -135,11 +192,36 @@ if (![string]::IsNullOrWhiteSpace($ServiceDirectory)) { &$preRemovalScript @PSBoundParameters } + + # Make sure environment files from New-TestResources -OutFile are removed. + Get-ChildItem -Path $root -Filter test-resources.json.env -Recurse | Remove-Item -Force:$Force +} + +$verifyDeleteScript = { + try { + $group = Get-AzResourceGroup -name $ResourceGroupName + } catch { + if ($_.ToString().Contains("Provided resource group does not exist")) { + Write-Verbose "Resource group '$ResourceGroupName' not found. Continuing..." + return + } + throw $_ + } + + if ($group.ProvisioningState -ne "Deleting") + { + throw "Resource group is in '$($group.ProvisioningState)' state, expected 'Deleting'" + } } Log "Deleting resource group '$ResourceGroupName'" -if (Retry { Remove-AzResourceGroup -Name "$ResourceGroupName" -Force:$Force }) { - Write-Verbose "Successfully deleted resource group '$ResourceGroupName'" +if ($Force) { + Remove-AzResourceGroup -Name "$ResourceGroupName" -Force:$Force -AsJob + Retry $verifyDeleteScript 3 + Write-Verbose "Requested async deletion of resource group '$ResourceGroupName'" +} else { + # Don't swallow interactive confirmation when Force is false + Remove-AzResourceGroup -Name "$ResourceGroupName" -Force:$Force } $exitActions.Invoke() @@ -147,6 +229,7 @@ $exitActions.Invoke() <# .SYNOPSIS Deletes the resource group deployed for a service directory from Azure. + .DESCRIPTION Removes a resource group and all its resources previously deployed using New-TestResources.ps1. 
@@ -155,32 +238,48 @@ you will be asked to log in with Connect-AzAccount. Alternatively, you (or a build pipeline) can pass $ProvisionerApplicationId and $ProvisionerApplicationSecret to authenticate a service principal with access to create resources. + .PARAMETER BaseName A name to use in the resource group and passed to the ARM template as 'baseName'. This will delete the resource group named 'rg-' + .PARAMETER ResourceGroupName The name of the resource group to delete. + .PARAMETER TenantId The tenant ID of a service principal when a provisioner is specified. + .PARAMETER SubscriptionId -Optional subscription ID to use for new resources when logging in as a +Optional subscription ID to use when deleting resources when logging in as a provisioner. You can also use Set-AzContext if not provisioning. + +If you do not specify a SubscriptionId and are not logged in, one will be +automatically selected for you by the Connect-AzAccount cmdlet. + +Once you are logged in (or were previously), the selected SubscriptionId +will be used for subsequent operations that are specific to a subscription. + .PARAMETER ProvisionerApplicationId A service principal ID to provision test resources when a provisioner is specified. + .PARAMETER ProvisionerApplicationSecret A service principal secret (password) to provision test resources when a provisioner is specified. + .PARAMETER ServiceDirectory A directory under 'sdk' in the repository root - optionally with subdirectories specified - in which to discover pre removal script named 'remove-test-resources-pre.json'. + .PARAMETER Environment Name of the cloud environment. The default is the Azure Public Cloud ('PublicCloud') + .PARAMETER Force Force removal of resource group without asking for user confirmation + .EXAMPLE -Remove-TestResources.ps1 -BaseName 'uuid123' -Force -Use the currently logged-in account to delete the resource group by the name of -'rg-uuid123' +Remove-TestResources.ps1 keyvault -Force +Use the currently logged-in account to delete the resources created for Key Vault testing. + .EXAMPLE Remove-TestResources.ps1 ` -ResourceGroupName "${env:AZURE_RESOURCEGROUP_NAME}" ` @@ -192,6 +291,5 @@ Remove-TestResources.ps1 ` When run in the context of an Azure DevOps pipeline, this script removes the resource group whose name is stored in the environment variable AZURE_RESOURCEGROUP_NAME. -.LINK -New-TestResources.ps1 -#> \ No newline at end of file + +#> diff --git a/eng/common/TestResources/Remove-TestResources.ps1.md b/eng/common/TestResources/Remove-TestResources.ps1.md index f9bc1803ae32..4b6159b86044 100644 --- a/eng/common/TestResources/Remove-TestResources.ps1.md +++ b/eng/common/TestResources/Remove-TestResources.ps1.md @@ -14,34 +14,37 @@ Deletes the resource group deployed for a service directory from Azure. 
### Default (Default) ``` -Remove-TestResources.ps1 [-BaseName] [-Environment ] [-Force] [-WhatIf] [-Confirm] +Remove-TestResources.ps1 [-BaseName ] [-SubscriptionId ] [-ServiceDirectory] + [-Environment ] [-Force] [-RemoveTestResourcesRemainingArguments ] [-WhatIf] [-Confirm] [] ``` ### Default+Provisioner ``` -Remove-TestResources.ps1 [-BaseName] -TenantId [-SubscriptionId ] - -ProvisionerApplicationId -ProvisionerApplicationSecret [-Environment ] [-Force] - [-WhatIf] [-Confirm] [] +Remove-TestResources.ps1 -BaseName -TenantId [-SubscriptionId ] + -ProvisionerApplicationId -ProvisionerApplicationSecret [[-ServiceDirectory] ] + [-Environment ] [-Force] [-RemoveTestResourcesRemainingArguments ] [-WhatIf] [-Confirm] + [] ``` ### ResourceGroup+Provisioner ``` Remove-TestResources.ps1 -ResourceGroupName -TenantId [-SubscriptionId ] - -ProvisionerApplicationId -ProvisionerApplicationSecret [-Environment ] [-Force] - [-WhatIf] [-Confirm] [] + -ProvisionerApplicationId -ProvisionerApplicationSecret [[-ServiceDirectory] ] + [-Environment ] [-Force] [-RemoveTestResourcesRemainingArguments ] [-WhatIf] [-Confirm] + [] ``` ### ResourceGroup ``` -Remove-TestResources.ps1 -ResourceGroupName [-Environment ] [-Force] [-WhatIf] [-Confirm] +Remove-TestResources.ps1 -ResourceGroupName [-SubscriptionId ] [[-ServiceDirectory] ] + [-Environment ] [-Force] [-RemoveTestResourcesRemainingArguments ] [-WhatIf] [-Confirm] [] ``` ## DESCRIPTION Removes a resource group and all its resources previously deployed using New-TestResources.ps1. - If you are not currently logged into an account in the Az PowerShell module, you will be asked to log in with Connect-AzAccount. Alternatively, you (or a @@ -53,12 +56,10 @@ create resources. ### EXAMPLE 1 ``` -Remove-TestResources.ps1 -BaseName 'uuid123' -Force +Remove-TestResources.ps1 keyvault -Force +Use the currently logged-in account to delete the resources created for Key Vault testing. ``` -Use the currently logged-in account to delete the resource group by the name of -'rg-uuid123' - ### EXAMPLE 2 ``` Remove-TestResources.ps1 ` @@ -68,11 +69,10 @@ Remove-TestResources.ps1 ` -ProvisionerApplicationSecret '$(AppSecret)' ` -Force ` -Verbose ` -``` - When run in the context of an Azure DevOps pipeline, this script removes the resource group whose name is stored in the environment variable AZURE_RESOURCEGROUP_NAME. +``` ## PARAMETERS @@ -82,11 +82,23 @@ This will delete the resource group named 'rg-\' ```yaml Type: String -Parameter Sets: Default, Default+Provisioner +Parameter Sets: Default +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +```yaml +Type: String +Parameter Sets: Default+Provisioner Aliases: Required: True -Position: 1 +Position: Named Default value: None Accept pipeline input: False Accept wildcard characters: False @@ -123,13 +135,19 @@ Accept wildcard characters: False ``` ### -SubscriptionId -Optional subscription ID to use for new resources when logging in as a +Optional subscription ID to use when deleting resources when logging in as a provisioner. You can also use Set-AzContext if not provisioning. +If you do not specify a SubscriptionId and are not logged in, one will be +automatically selected for you by the Connect-AzAccount cmdlet. + +Once you are logged in (or were previously), the selected SubscriptionId +will be used for subsequent operations that are specific to a subscription. 
+ ```yaml Type: String -Parameter Sets: Default+Provisioner, ResourceGroup+Provisioner +Parameter Sets: (All) Aliases: Required: False @@ -171,15 +189,27 @@ Accept wildcard characters: False ### -ServiceDirectory A directory under 'sdk' in the repository root - optionally with subdirectories -specified - specified - in which to discover pre removal script named 'remove-test-resources-pre.json'. +specified - in which to discover pre removal script named 'remove-test-resources-pre.json'. ```yaml Type: String -Parameter Sets: (All) +Parameter Sets: Default +Aliases: + +Required: True +Position: 1 +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +```yaml +Type: String +Parameter Sets: Default+Provisioner, ResourceGroup+Provisioner, ResourceGroup Aliases: Required: False -Position: Named +Position: 1 Default value: None Accept pipeline input: False Accept wildcard characters: False @@ -217,6 +247,21 @@ Accept pipeline input: False Accept wildcard characters: False ``` +### -RemoveTestResourcesRemainingArguments +Captures any arguments not declared here (no parameter errors) + +```yaml +Type: Object +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + ### -WhatIf Shows what would happen if the cmdlet runs. The cmdlet is not run. @@ -249,7 +294,7 @@ Accept wildcard characters: False ``` ### CommonParameters -This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](https://go.microsoft.com/fwlink/?LinkID=113216). ## INPUTS @@ -258,5 +303,3 @@ This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable ## NOTES ## RELATED LINKS - -[New-TestResources.ps1](./New-TestResources.ps1.md) diff --git a/eng/common/TestResources/Update-TestResources.cmd b/eng/common/TestResources/Update-TestResources.cmd new file mode 100644 index 000000000000..94b0c1f2db1a --- /dev/null +++ b/eng/common/TestResources/Update-TestResources.cmd @@ -0,0 +1,17 @@ +@echo off + +REM Copyright (c) Microsoft Corporation. All rights reserved. +REM Licensed under the MIT License. + +setlocal + +for /f "usebackq delims=" %%i in (`where pwsh 2^>nul`) do ( + set _cmd=%%i +) + +if "%_cmd%"=="" ( + echo Error: PowerShell not found. Please visit https://github.com/powershell/powershell for install instructions. + exit /b 2 +) + +call "%_cmd%" -NoLogo -NoProfile -File "%~dpn0.ps1" %* diff --git a/eng/common/TestResources/Update-TestResources.ps1 b/eng/common/TestResources/Update-TestResources.ps1 new file mode 100644 index 000000000000..64f319e1babb --- /dev/null +++ b/eng/common/TestResources/Update-TestResources.ps1 @@ -0,0 +1,206 @@ +#!/usr/bin/env pwsh + +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
+ +#Requires -Version 6.0 +#Requires -PSEdition Core +#Requires -Modules @{ModuleName='Az.Accounts'; ModuleVersion='1.6.4'} +#Requires -Modules @{ModuleName='Az.Resources'; ModuleVersion='1.8.0'} + +[CmdletBinding(DefaultParameterSetName = 'Default')] +param ( + [Parameter(ParameterSetName = 'Default', Mandatory = $true, Position = 0)] + [string] $ServiceDirectory, + + [Parameter(ParameterSetName = 'Default')] + [ValidatePattern('^[-a-zA-Z0-9\.\(\)_]{0,80}(?<=[a-zA-Z0-9\(\)])$')] + [string] $BaseName, + + [Parameter(ParameterSetName = 'ResourceGroup')] + [ValidatePattern('^[-\w\._\(\)]+$')] + [string] $ResourceGroupName, + + [Parameter()] + [ValidatePattern('^[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$')] + [string] $SubscriptionId, + + [Parameter()] + [ValidateRange(1, [int]::MaxValue)] + [int] $DeleteAfterHours = 48 +) + +# By default stop for any error. +if (!$PSBoundParameters.ContainsKey('ErrorAction')) { + $ErrorActionPreference = 'Stop' +} + +function Log($Message) { + Write-Host ('{0} - {1}' -f [DateTime]::Now.ToLongTimeString(), $Message) +} + +function Retry([scriptblock] $Action, [int] $Attempts = 5) { + $attempt = 0 + $sleep = 5 + + while ($attempt -lt $Attempts) { + try { + $attempt++ + return $Action.Invoke() + } catch { + if ($attempt -lt $Attempts) { + $sleep *= 2 + + Write-Warning "Attempt $attempt failed: $_. Trying again in $sleep seconds..." + Start-Sleep -Seconds $sleep + } else { + Write-Error -ErrorRecord $_ + } + } + } +} + +# Support actions to invoke on exit. +$exitActions = @({ + if ($exitActions.Count -gt 1) { + Write-Verbose 'Running registered exit actions' + } +}) + +# Make sure $BaseName is set. +if (!$BaseName) { + $UserName = if ($env:USER) { $env:USER } else { "${env:USERNAME}" } + # Remove spaces, etc. that may be in $UserName + $UserName = $UserName -replace '\W' + + $BaseName = "$UserName$ServiceDirectory" + Log "BaseName was not set. Using default base name '$BaseName'" +} + +# Make sure $ResourceGroupName is set. +if (!$ResourceGroupName) { + $ResourceGroupName = "rg-$BaseName" +} + +# This script is intended for interactive users. Make sure they are logged in or fail. +$context = Get-AzContext +if (!$context) { + throw "You must be already logged in to use this script. Run 'Connect-AzAccount' and try again." +} + +# If no subscription was specified, try to select the Azure SDK Developer Playground subscription. +# Ignore errors to leave the automatically selected subscription. +if ($SubscriptionId) { + $currentSubcriptionId = $context.Subscription.Id + if ($currentSubcriptionId -ne $SubscriptionId) { + Log "Selecting subscription '$SubscriptionId'" + $null = Select-AzSubscription -Subscription $SubscriptionId + + $exitActions += { + Log "Selecting previous subscription '$currentSubcriptionId'" + $null = Select-AzSubscription -Subscription $currentSubcriptionId + } + + # Update the context. + $context = Get-AzContext + } +} else { + Log "Attempting to select subscription 'Azure SDK Developer Playground (faa080af-c1d8-40ad-9cce-e1a450ca5b57)'" + $null = Select-AzSubscription -Subscription 'faa080af-c1d8-40ad-9cce-e1a450ca5b57' -ErrorAction Ignore + + # Update the context. + $context = Get-AzContext + + $SubscriptionId = $context.Subscription.Id + $PSBoundParameters['SubscriptionId'] = $SubscriptionId +} + +# Use cache of well-known team subs without having to be authenticated. 
+$wellKnownSubscriptions = @{ + 'faa080af-c1d8-40ad-9cce-e1a450ca5b57' = 'Azure SDK Developer Playground' + 'a18897a6-7e44-457d-9260-f2854c0aca42' = 'Azure SDK Engineering System' + '2cd617ea-1866-46b1-90e3-fffb087ebf9b' = 'Azure SDK Test Resources' +} + +# Print which subscription is currently selected. +$subscriptionName = $context.Subscription.Id +if ($wellKnownSubscriptions.ContainsKey($subscriptionName)) { + $subscriptionName = '{0} ({1})' -f $wellKnownSubscriptions[$subscriptionName], $subscriptionName +} + +Log "Selected subscription '$subscriptionName'" + +# try..finally will also trap Ctrl+C. +try { + Log "Getting resource group '$ResourceGroupName'" + + $resourceGroup = Get-AzResourceGroup -Name $ResourceGroupName + + # Update DeleteAfter + $deleteAfter = [DateTime]::UtcNow.AddHours($DeleteAfterHours).ToString('o') + + Log "Updating DeleteAfter to '$deleteAfter'" + Write-Warning "Any clean-up scripts running against subscription '$SubscriptionId' may delete resource group '$ResourceGroupName' after $DeleteAfterHours hours." + $resourceGroup.Tags['DeleteAfter'] = $deleteAfter + + Log "Updating resource group '$ResourceGroupName'" + Retry { + # Allow the resource group to write to output. + Set-AzResourceGroup -Name $ResourceGroupName -Tag $resourceGroup.Tags + } +} finally { + $exitActions.Invoke() +} + +<# +.SYNOPSIS +Updates a resource group previously deployed for a service directory. + +.DESCRIPTION +Updates a resource group that was created using New-TestResources.ps1. +You can use this, for example, to update the `DeleteAfterHours` property +to keep an existing resource group deployed for a longer period of time. + +.PARAMETER ServiceDirectory +A directory under 'sdk' in the repository root - optionally with subdirectories +specified - in which to discover ARM templates named 'test-resources.json'. +This can also be an absolute path or specify parent directories. + +.PARAMETER BaseName +A name to use in the resource group and passed to the ARM template as 'baseName'. +This will update the resource group named 'rg-' + +.PARAMETER ResourceGroupName +The name of the resource group to update. + +.PARAMETER SubscriptionId +Optional subscription ID to use when deleting resources when logging in as a +provisioner. You can also use Set-AzContext if not provisioning. + +If you do not specify a SubscriptionId and are not logged in, one will be +automatically selected for you by the Connect-AzAccount cmdlet. + +Once you are logged in (or were previously), the selected SubscriptionId +will be used for subsequent operations that are specific to a subscription. + +.PARAMETER DeleteAfterHours +Positive integer number of hours from the current time to set the +'DeleteAfter' tag on the created resource group. The computed value is a +timestamp of the form "2020-03-04T09:07:04.3083910Z". + +An optional cleanup process can delete resource groups whose "DeleteAfter" +timestamp is less than the current time. + +.EXAMPLE +Update-TestResources.ps1 keyvault -DeleteAfterHours 24 + +Update the 'rg-${USERNAME}keyvault` resource group to be deleted after 24 +hours from now if a clean-up script is running against the current subscription. + +.EXAMPLE +Update-TestResources.ps1 -ResourceGroupName rg-userkeyvault -Subscription fa9c6912-f641-4226-806c-5139584b89ca + +Update the 'rg-userkeyvault' resource group to be deleted after 48 +hours from now if a clean-up script is running against the subscription 'fa9c6912-f641-4226-806c-5139584b89ca'. 
+ +#> diff --git a/eng/common/TestResources/Update-TestResources.ps1.md b/eng/common/TestResources/Update-TestResources.ps1.md new file mode 100644 index 000000000000..17d31511cec6 --- /dev/null +++ b/eng/common/TestResources/Update-TestResources.ps1.md @@ -0,0 +1,153 @@ +--- +external help file: -help.xml +Module Name: +online version: +schema: 2.0.0 +--- + +# Update-TestResources.ps1 + +## SYNOPSIS +Updates a resource group previously deployed for a service directory. + +## SYNTAX + +### Default (Default) +``` +Update-TestResources.ps1 [-ServiceDirectory] [-BaseName ] [-SubscriptionId ] + [-DeleteAfterHours ] [] +``` + +### ResourceGroup +``` +Update-TestResources.ps1 [-ResourceGroupName ] [-SubscriptionId ] [-DeleteAfterHours ] + [] +``` + +## DESCRIPTION +Updates a resource group that was created using New-TestResources.ps1. +You can use this, for example, to update the \`DeleteAfterHours\` property +to keep an existing resource group deployed for a longer period of time. + +## EXAMPLES + +### EXAMPLE 1 +``` +Update-TestResources.ps1 keyvault -DeleteAfterHours 24 +``` + +Update the 'rg-${USERNAME}keyvault\` resource group to be deleted after 24 +hours from now if a clean-up script is running against the current subscription. + +### EXAMPLE 2 +``` +Update-TestResources.ps1 -ResourceGroupName rg-userkeyvault -Subscription fa9c6912-f641-4226-806c-5139584b89ca +``` + +Update the 'rg-userkeyvault' resource group to be deleted after 48 +hours from now if a clean-up script is running against the subscription 'fa9c6912-f641-4226-806c-5139584b89ca'. + +## PARAMETERS + +### -ServiceDirectory +A directory under 'sdk' in the repository root - optionally with subdirectories +specified - in which to discover ARM templates named 'test-resources.json'. +This can also be an absolute path or specify parent directories. + +```yaml +Type: String +Parameter Sets: Default +Aliases: + +Required: True +Position: 1 +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -BaseName +A name to use in the resource group and passed to the ARM template as 'baseName'. +This will update the resource group named 'rg-\' + +```yaml +Type: String +Parameter Sets: Default +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group to update. + +```yaml +Type: String +Parameter Sets: ResourceGroup +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +Optional subscription ID to use when deleting resources when logging in as a +provisioner. +You can also use Set-AzContext if not provisioning. + +If you do not specify a SubscriptionId and are not logged in, one will be +automatically selected for you by the Connect-AzAccount cmdlet. + +Once you are logged in (or were previously), the selected SubscriptionId +will be used for subsequent operations that are specific to a subscription. + +```yaml +Type: String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DeleteAfterHours +Positive integer number of hours from the current time to set the +'DeleteAfter' tag on the created resource group. +The computed value is a +timestamp of the form "2020-03-04T09:07:04.3083910Z". 
+ +An optional cleanup process can delete resource groups whose "DeleteAfter" +timestamp is less than the current time. + +```yaml +Type: Int32 +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: 48 +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](https://go.microsoft.com/fwlink/?LinkID=113216). + +## INPUTS + +## OUTPUTS + +## NOTES + +## RELATED LINKS diff --git a/eng/common/TestResources/deploy-test-resources.yml b/eng/common/TestResources/deploy-test-resources.yml index b875a806b143..a9d693b99420 100644 --- a/eng/common/TestResources/deploy-test-resources.yml +++ b/eng/common/TestResources/deploy-test-resources.yml @@ -1,40 +1,58 @@ parameters: ServiceDirectory: not-set ArmTemplateParameters: '@{}' - DeleteAfterHours: 24 + DeleteAfterHours: 8 Location: '' SubscriptionConfiguration: $(sub-config-azure-cloud-test-resources) -# SubscriptionConfiguration will be splat into the parameters of the test +# SubscriptionConfiguration will be splatted into the parameters of the test # resources script. It should be JSON in the form: # { # "SubscriptionId": "", # "TenantId": "", # "TestApplicationId": "", # "TestApplicationSecret": "", -# "ProvisionerApplicationId": "", -# "ProvisionerApplicationSecret": "", +# "ProvisionerApplicationId": "", +# "ProvisionerApplicationSecret": "", # "Environment": "AzureCloud | AzureGov | AzureChina | " +# "EnvironmentVariables": { +# "SERVICE_MANAGEMENT_URL": "", +# "STORAGE_ENDPOINT_SUFFIX": "", +# "RESOURCE_MANAGER_URL": "", +# "SEARCH_ENDPOINT_SUFFIX": "", +# "COSMOS_TABLES_ENDPOINT_SUFFIX": "" +# }, +# "ArmTemplateParameters": { +# "keyVaultDomainSuffix": "", +# "storageEndpointSuffix": "", +# "endpointSuffix": "", +# "azureAuthorityHost": "", +# "keyVaultEndpointSuffix": "" +# } # } + steps: - # New-TestResources command requires Az module - - pwsh: Install-Module -Name Az -Scope CurrentUser -AllowClobber -Force -Verbose - displayName: Install Azure PowerShell module + - template: /eng/common/TestResources/setup-az-modules.yml - pwsh: | - $subscriptionConfiguration = @" + eng/common/TestResources/Import-AzModules.ps1 + + $subscriptionConfiguration = @' ${{ parameters.SubscriptionConfiguration }} - "@ | ConvertFrom-Json -AsHashtable; + '@ | ConvertFrom-Json -AsHashtable; + # The subscriptionConfiguration may have ArmTemplateParameters defined, so + # pass those in via the ArmTemplateParameters flag, and handle any + # additional parameters from the pipelines via AdditionalParameters eng/common/TestResources/New-TestResources.ps1 ` -BaseName 'Generated' ` - -ServiceDirectory ${{ parameters.ServiceDirectory }} ` + -ServiceDirectory '${{ parameters.ServiceDirectory }}' ` -Location '${{ parameters.Location }}' ` - -DeleteAfterHours ${{ parameters.DeleteAfterHours }} ` - -AdditionalParameters ${{ parameters.ArmTemplateParameters }} ` + -DeleteAfterHours '${{ parameters.DeleteAfterHours }}' ` @subscriptionConfiguration ` + -AdditionalParameters ${{ parameters.ArmTemplateParameters }} ` -CI ` -Force ` - -Verbose + -Verbose | Out-Null displayName: Deploy test resources diff --git a/eng/common/TestResources/remove-test-resources.yml b/eng/common/TestResources/remove-test-resources.yml index 767f8c8c516b..a0871a2ff7ad 100644 --- 
a/eng/common/TestResources/remove-test-resources.yml +++ b/eng/common/TestResources/remove-test-resources.yml @@ -12,8 +12,8 @@ parameters: # "TenantId": "", # "TestApplicationId": "", # "TestApplicationSecret": "", -# "ProvisionerApplicationId": "", -# "ProvisoinerApplicationSecret": "", +# "ProvisionerApplicationId": "", +# "ProvisionerApplicationSecret": "", # "Environment": "AzureCloud | AzureGov | AzureChina | " # } # The Remove-TestResources.ps1 script accommodates extra parameters so it will @@ -21,13 +21,15 @@ parameters: steps: - pwsh: | + eng/common/TestResources/Import-AzModules.ps1 + $subscriptionConfiguration = @" ${{ parameters.SubscriptionConfiguration }} "@ | ConvertFrom-Json -AsHashtable; eng/common/TestResources/Remove-TestResources.ps1 ` -ResourceGroupName "${env:AZURE_RESOURCEGROUP_NAME}" ` - -ServiceDirectory ${{ parameters.ServiceDirectory }} ` + -ServiceDirectory "${{ parameters.ServiceDirectory }}" ` @subscriptionConfiguration ` -Force ` -Verbose diff --git a/eng/common/TestResources/setup-az-modules.yml b/eng/common/TestResources/setup-az-modules.yml new file mode 100644 index 000000000000..b5369f433b90 --- /dev/null +++ b/eng/common/TestResources/setup-az-modules.yml @@ -0,0 +1,34 @@ +# Cloud Configuration will be splat into parameters of `Add-AzEnvironment`. It +# should be JSON in the form (not all fields are required): +# { +# "Name": "", +# "PublishSettingsFileUrl": "", +# "ServiceEndpoint": "", +# "ManagementPortalUrl": "", +# "ActiveDirectoryEndpoint": "", +# "ActiveDirectoryServiceEndpointResourceId": "", +# "ResourceManagerEndpoint": "", +# "GalleryEndpoint": "", +# "GraphEndpoint": "", +# "GraphAudience": "", +# "AzureKeyVaultDnsSuffix": "", +# "AzureKeyVaultServiceEndpointResourceId": "" +# } + +steps: + - bash: sudo chown -R runner ~/.Azure + displayName: (MacOS) Grant access to ~/.Azure + condition: contains(variables['OSVmImage'], 'mac') + + - task: Powershell@2 + inputs: + displayName: Register Dogfood environment + targetType: inline + pwsh: true + script: | + eng/common/TestResources/Import-AzModules.ps1 + + $environmentSpec = @" + $(env-config-dogfood) + "@ | ConvertFrom-Json -AsHashtable; + Add-AzEnvironment @environmentSpec diff --git a/eng/common/Update-Change-Log.ps1 b/eng/common/Update-Change-Log.ps1 index a819a05e8bb2..3d1497f29863 100644 --- a/eng/common/Update-Change-Log.ps1 +++ b/eng/common/Update-Change-Log.ps1 @@ -1,4 +1,4 @@ -# Note: This script will add or replace version title in change log +# Note: This script will add or replace version title in change log # Parameter description # Version : Version to add or replace in change log @@ -12,7 +12,8 @@ param ( [Parameter(Mandatory = $true)] [String]$ChangeLogPath, [String]$Unreleased = $True, - [String]$ReplaceVersion = $False + [String]$ReplaceVersion = $False, + [String]$ReleaseDate ) @@ -46,8 +47,12 @@ function Get-VersionTitle($Version, $Unreleased) # Generate version title $newVersionTitle = "## $Version $UNRELEASED_TAG" if ($Unreleased -eq $False) { - $releaseDate = Get-Date -Format "(yyyy-MM-dd)" - $newVersionTitle = "## $Version $releaseDate" + $actualReleaseDate = $ReleaseDate; + + if (!$actualReleaseDate) { + $actualReleaseDate = Get-Date -Format "yyyy-MM-dd" + } + $newVersionTitle = "## $Version ($actualReleaseDate)" } return $newVersionTitle } @@ -95,7 +100,7 @@ function Get-NewChangeLog( [System.Collections.ArrayList]$ChangelogLines, $Versi exit(0) } - if (($ReplaceVersion -eq $True) -and ($Unreleased -eq $False) -and $CurrentTitle.Contains($version) -and (-not 
$CurrentTitle.Contains($UNRELEASED_TAG))) { + if (($ReplaceVersion -eq $True) -and ($Unreleased -eq $False) -and $CurrentTitle.Contains($version) -and (-not $CurrentTitle.Contains($UNRELEASED_TAG)) -and (-not $ReleaseDate)) { Write-Host "Version is already present in change log with a release date." exit(0) } diff --git a/eng/common/docgeneration/Generate-DocIndex.ps1 b/eng/common/docgeneration/Generate-DocIndex.ps1 new file mode 100644 index 000000000000..82b3a75e4e53 --- /dev/null +++ b/eng/common/docgeneration/Generate-DocIndex.ps1 @@ -0,0 +1,181 @@ +# Generates an index page for cataloging different versions of the Docs +[CmdletBinding()] +Param ( + $DocFx, + $RepoRoot, + $DocGenDir, + $DocOutDir = "${RepoRoot}/docfx_project", + $DocfxJsonPath = "${PSScriptRoot}\docfx.json", + $MainJsPath = "${PSScriptRoot}\templates\matthews\styles\main.js" +) +. "${PSScriptRoot}\..\scripts\common.ps1" + +# Given the github io blob storage url and language regex, +# the helper function will return a list of artifact names. +function Get-BlobStorage-Artifacts($blobStorageUrl, $blobDirectoryRegex, $blobArtifactsReplacement) { + LogDebug "Reading artifact from storage blob ..." + $returnedArtifacts = @() + $pageToken = "" + Do { + $resp = "" + if (!$pageToken) { + # First page call. + $resp = Invoke-RestMethod -Method Get -Uri $blobStorageUrl + } + else { + # Next page call + $blobStorageUrlPageToken = $blobStorageUrl + "&marker=$pageToken" + $resp = Invoke-RestMethod -Method Get -Uri $blobStorageUrlPageToken + } + # Convert to xml documents. + $xmlDoc = [xml](removeBomFromString $resp) + foreach ($elem in $xmlDoc.EnumerationResults.Blobs.BlobPrefix) { + # What service return like "dotnet/Azure.AI.Anomalydetector/", needs to fetch out "Azure.AI.Anomalydetector" + $artifact = $elem.Name -replace $blobDirectoryRegex, $blobArtifactsReplacement + $returnedArtifacts += $artifact + } + # Fetch page token + $pageToken = $xmlDoc.EnumerationResults.NextMarker + } while ($pageToken) + return $returnedArtifacts + } + +# The sequence of Bom bytes differs by different encoding. +# The helper function here is only to strip the utf-8 encoding system as it is used by blob storage list api. +# Return the original string if not in BOM utf-8 sequence. +function RemoveBomFromString([string]$bomAwareString) { + if ($bomAwareString.length -le 3) { + return $bomAwareString + } + $bomPatternByteArray = [byte[]] (0xef, 0xbb, 0xbf) + # The default encoding for powershell is ISO-8859-1, so converting bytes with the encoding. + $bomAwareBytes = [Text.Encoding]::GetEncoding(28591).GetBytes($bomAwareString.Substring(0, 3)) + if (@(Compare-Object $bomPatternByteArray $bomAwareBytes -SyncWindow 0).Length -eq 0) { + return $bomAwareString.Substring(3) + } + return $bomAwareString +} + +function Get-TocMapping { + Param ( + [Parameter(Mandatory = $true)] [Object[]] $metadata, + [Parameter(Mandatory = $true)] [String[]] $artifacts + ) + # Used for sorting the toc display order + $orderServiceMapping = @{} + + foreach ($artifact in $artifacts) { + $packageInfo = $metadata | ? { $_.Package -eq $artifact -and $_.Hide -ne "true" } + $serviceName = "" + $displayName = "" + if (!$packageInfo) { + LogDebug "There is no service name for artifact $artifact or it is marked as hidden. Please check csv of Azure/azure-sdk/_data/release/latest repo if this is intended. " + continue + } + elseif (!$packageInfo[0].ServiceName) { + LogWarning "There is no service name for artifact $artifact. 
Please check csv of Azure/azure-sdk/_data/release/latest repo if this is intended. " + # If no service name retrieved, print out warning message, and put it into Other page. + $serviceName = "Other" + $displayName = $packageInfo[0].DisplayName.Trim() + } + else { + if ($packageInfo.Length -gt 1) { + LogWarning "There are more than 1 packages fetched out for artifact $artifact. Please check csv of Azure/azure-sdk/_data/release/latest repo if this is intended. " + } + $serviceName = $packageInfo[0].ServiceName.Trim() + $displayName = $packageInfo[0].DisplayName.Trim() + } + $orderServiceMapping[$artifact] = @($serviceName, $displayName) + } + return $orderServiceMapping +} + +function GenerateDocfxTocContent([Hashtable]$tocContent, [String]$lang) { + LogDebug "Start generating the docfx toc and build docfx site..." + + LogDebug "Initializing Default DocFx Site..." + & $($DocFx) init -q -o "${DocOutDir}" + # The line below is used for testing in local + #docfx init -q -o "${DocOutDir}" + LogDebug "Copying template and configuration..." + New-Item -Path "${DocOutDir}" -Name "templates" -ItemType "directory" -Force + Copy-Item "${DocGenDir}/templates/*" -Destination "${DocOutDir}/templates" -Force -Recurse + Copy-Item "${DocGenDir}/docfx.json" -Destination "${DocOutDir}/" -Force + $YmlPath = "${DocOutDir}/api" + New-Item -Path $YmlPath -Name "toc.yml" -Force + $visitedService = @{} + # Sort and display toc service name by alphabetical order, and then sort artifact by order. + foreach ($serviceMapping in ($tocContent.GetEnumerator() | Sort-Object Value, Key)) { + $artifact = $serviceMapping.Key + $serviceName = $serviceMapping.Value[0] + $displayName = $serviceMapping.Value[1] + + $fileName = ($serviceName -replace '\s', '').ToLower().Trim() + if ($visitedService.ContainsKey($serviceName)) { + if ($displayName) { + Add-Content -Path "$($YmlPath)/${fileName}.md" -Value "#### $artifact`n##### ($displayName)" + } + else { + Add-Content -Path "$($YmlPath)/${fileName}.md" -Value "#### $artifact" + } + } + else { + Add-Content -Path "$($YmlPath)/toc.yml" -Value "- name: ${serviceName}`r`n href: ${fileName}.md" + New-Item -Path $YmlPath -Name "${fileName}.md" -Force + if ($displayName) { + Add-Content -Path "$($YmlPath)/${fileName}.md" -Value "#### $artifact`n##### ($displayName)" + } + else { + Add-Content -Path "$($YmlPath)/${fileName}.md" -Value "#### $artifact" + } + $visitedService[$serviceName] = $true + } + } + + # Generate toc homepage. + LogDebug "Creating Site Title and Navigation..." + New-Item -Path "${DocOutDir}" -Name "toc.yml" -Force + Add-Content -Path "${DocOutDir}/toc.yml" -Value "- name: Azure SDK for $lang APIs`r`n href: api/`r`n homepage: api/index.md" + + LogDebug "Copying root markdowns" + Copy-Item "$($RepoRoot)/README.md" -Destination "${DocOutDir}/api/index.md" -Force + Copy-Item "$($RepoRoot)/CONTRIBUTING.md" -Destination "${DocOutDir}/api/CONTRIBUTING.md" -Force + + LogDebug "Building site..." 
+ & $($DocFx) build "${DocOutDir}/docfx.json" + # The line below is used for testing in local + #docfx build "${DocOutDir}/docfx.json" + Copy-Item "${DocGenDir}/assets/logo.svg" -Destination "${DocOutDir}/_site/" -Force +} + +function UpdateDocIndexFiles { + Param ( + [Parameter(Mandatory=$false)] [String]$appTitleLang = $Language, + [Parameter(Mandatory=$false)] [String]$lang = $Language, + [Parameter(Mandatory=$false)] [String]$packageRegex = "`"`"", + [Parameter(Mandatory=$false)] [String]$regexReplacement = "" + ) + # Update docfx.json + $docfxContent = Get-Content -Path $DocfxJsonPath -Raw + $docfxContent = $docfxContent -replace "`"_appTitle`": `"`"", "`"_appTitle`": `"Azure SDK for $appTitleLang`"" + $docfxContent = $docfxContent -replace "`"_appFooter`": `"`"", "`"_appFooter`": `"Azure SDK for $appTitleLang`"" + Set-Content -Path $DocfxJsonPath -Value $docfxContent -NoNewline + # Update main.js var lang + $mainJsContent = Get-Content -Path $MainJsPath -Raw + $mainJsContent = $mainJsContent -replace "var SELECTED_LANGUAGE = ''", "var SELECTED_LANGUAGE = '$lang'" + # Update main.js package regex and replacement + $mainJsContent = $mainJsContent -replace "var PACKAGE_REGEX = ''", "var PACKAGE_REGEX = $packageRegex" + $mainJsContent = $mainJsContent -replace "var PACKAGE_REPLACEMENT = ''", "var PACKAGE_REPLACEMENT = `"$regexReplacement`"" + Set-Content -Path $MainJsPath -Value $mainJsContent -NoNewline +} + +if ($GetGithubIoDocIndexFn -and (Test-Path "function:$GetGithubIoDocIndexFn")) +{ + &$GetGithubIoDocIndexFn +} +else +{ + LogWarning "The function for 'GetGithubIoDocIndexFn' was not found.` + Make sure it is present in eng/scripts/Language-Settings.ps1 and referenced in eng/common/scripts/common.ps1.` + See https://github.com/Azure/azure-sdk-tools/blob/master/doc/common/common_engsys.md#code-structure" +} diff --git a/eng/common/docgeneration/assets/logo.svg b/eng/common/docgeneration/assets/logo.svg new file mode 100644 index 000000000000..5da99f404886 --- /dev/null +++ b/eng/common/docgeneration/assets/logo.svg @@ -0,0 +1,76 @@ + + + + diff --git a/eng/common/docgeneration/docfx.json b/eng/common/docgeneration/docfx.json new file mode 100644 index 000000000000..837478ef9aac --- /dev/null +++ b/eng/common/docgeneration/docfx.json @@ -0,0 +1,72 @@ +{ + "metadata": [ + { + "src": [ + { + "files": [ + "src/**.csproj" + ] + } + ], + "dest": "api", + "disableGitFeatures": false, + "disableDefaultFilter": false + } + ], + "build": { + "content": [ + { + "files": [ + "api/**.yml", + "api/**.md", + "api/index.md" + ] + }, + { + "files": [ + "toc.yml", + "*.md" + ] + } + ], + "resource": [ + { + "files": [ + "images/**" + ] + } + ], + "overwrite": [ + { + "files": [ + "apidoc/**.md" + ], + "exclude": [ + "obj/**", + "_site/**" + ] + } + ], + "dest": "_site", + "globalMetadataFiles": [], + "fileMetadataFiles": [], + "template": [ + "default", + "templates/matthews" + ], + "postProcessors": [], + "markdownEngineName": "markdig", + "noLangKeyword": false, + "keepFileLink": false, + "cleanupCacheHistory": false, + "disableGitFeatures": false, + "globalMetadata": { + "_appTitle": "", + "_appFooter": "", + "_enableSearch": false, + "_enableNewTab": true, + "_appFaviconPath": "https://c.s-microsoft.com/favicon.ico?v2", + "_disableContribution": true + } + } +} diff --git a/eng/common/docgeneration/templates/matthews/partials/affix.tmpl.partial b/eng/common/docgeneration/templates/matthews/partials/affix.tmpl.partial new file mode 100644 index 000000000000..43a33d0120a3 --- /dev/null +++ 
b/eng/common/docgeneration/templates/matthews/partials/affix.tmpl.partial @@ -0,0 +1,17 @@
[new file, 17 lines: a Mustache/HTML partial that renders the floating "improve this doc" ({{#docurl}}) and "view source" ({{#sourceurl}}) contribution buttons unless _disableContribution is set; the literal HTML markup did not survive extraction and is omitted here]
diff --git a/eng/common/docgeneration/templates/matthews/partials/class.header.tmpl.partial b/eng/common/docgeneration/templates/matthews/partials/class.header.tmpl.partial new file mode 100644 index 000000000000..49a27d827322 --- /dev/null +++ b/eng/common/docgeneration/templates/matthews/partials/class.header.tmpl.partial @@ -0,0 +1,100 @@
[new file, 100 lines: renders a type's title, summary, inheritance chain, inherited members, namespace and assembly, syntax, parameters, returns, type parameters, remarks, and examples; HTML markup omitted]
diff --git a/eng/common/docgeneration/templates/matthews/partials/class.tmpl.partial b/eng/common/docgeneration/templates/matthews/partials/class.tmpl.partial new file mode 100644 index 000000000000..5f00b822cc76 --- /dev/null +++ b/eng/common/docgeneration/templates/matthews/partials/class.tmpl.partial @@ -0,0 +1,210 @@
[new file, 210 lines: includes class.header and renders each member's declaration, parameters, returns, type parameters, field/property/event values, overrides, implemented interfaces, remarks, examples, exceptions, see-also links, and extension methods; HTML markup omitted]
diff --git a/eng/common/docgeneration/templates/matthews/partials/enum.tmpl.partial b/eng/common/docgeneration/templates/matthews/partials/enum.tmpl.partial new file mode 100644 index 000000000000..91e7ede1ac83 --- /dev/null +++ b/eng/common/docgeneration/templates/matthews/partials/enum.tmpl.partial @@ -0,0 +1,24 @@
[new file, 24 lines: includes class.header and renders each enum member's name and summary plus any extension methods; HTML markup omitted]
diff --git a/eng/common/docgeneration/templates/matthews/partials/namespace.tmpl.partial b/eng/common/docgeneration/templates/matthews/partials/namespace.tmpl.partial new file mode 100644 index 000000000000..f607a3dc61bd --- /dev/null +++ b/eng/common/docgeneration/templates/matthews/partials/namespace.tmpl.partial @@ -0,0 +1,17 @@
[new file, 17 lines: renders a namespace page with its title, summary, remarks, and a table of child types with their summaries; HTML markup omitted - only the closing {{/children}} fragment survives below]
+{{/children}}
diff --git a/eng/common/docgeneration/templates/matthews/styles/main.css b/eng/common/docgeneration/templates/matthews/styles/main.css
new file mode 100644
index 000000000000..944ef728f68e
--- /dev/null
+++ b/eng/common/docgeneration/templates/matthews/styles/main.css
@@ -0,0 +1,311 @@
+@import url("https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.6.3/css/font-awesome.min.css");
+
+/* Clickability fix for selector on sm devices */
+@media (min-width: 768px) and (max-width: 991px) {
+  article h1:first-of-type:before {
+    height: 0;
+    margin-top: 0;
+  }
+}
+
+#search {
+  border: none;
+}
+
+.fa-code {
+  font-size: 19px;
+}
+
+.sidetoc,
+body .toc,
+.sidefilter,
+.sidetoggle {
+  background-color: #f9fbe7;
+}
+
+.sidenav,
+.toc-toggle {
+  padding: 0;
+}
+
+.sidetoggle {
+  padding-bottom: 15px;
+}
+
+/* Remove center align from Navbar and Collapsible section */
+.collapse.in,
+.collapsing {
+  text-align: unset;
+}
+
+article h4 {
+  border-bottom: none;
+  line-height: normal;
+}
+
+@media (min-width: 768px) {
+  .sidetoc, .sidefilter {
+    margin-left: -15px;
+  }
+}
+
+@media (max-width: 767px) {
+  .navbar-collapse {
+    text-align: center !important;
+  }
+
+  .navbar-collapse li .active {
+    border-radius: 20px;
+  }
+}
+
+/* Collapsible Sections
+   ------------------------------------------------------- */
+.expander:after {
+  font-family: 'Glyphicons Halflings';
+  content: "\e260";
+  margin-left: 5px;
+  color: grey;
+  font-size: small;
+}
+
+.expander.collapsed:after {
+  content: "\e259";
+}
+
+/* Floating buttons
+   ------------------------------------------------------- */
+.fab {
+  width: 40px;
+  height: 40px;
+  text-align: center;
+  padding: 11px 0 0 0;
+  border: none;
+  outline: none;
+  color: #FFF;
+  border-radius: 100%;
+  box-shadow: 0 3px 6px rgba(0,0,0,0.16), 0 3px 6px rgba(0,0,0,0.23);
+  transition:.3s;
+}
+
+.fab:hover {
+  transform: scale(1.1);
+}
+
+.fab + .fab {
+  margin-right: 15px;
+}
+
+.contribution-panel {
+  z-index: 1000;
+  position: fixed;
+  right: 30px;
+  top: 70px;
+}
+
+/* Bootstrap docs like sidebar
+   ------------------------------------------------------- */
+.affix h5 {
+  display: none;
+}
+
+/* active & hover links */
+.affix ul > li > a:hover,
+.affix ul > li.active > a,
+.affix ul > li > a:focus {
+  color: #563d7c;
+  text-decoration: none;
+  background-color: transparent;
+  border-left-color: #563d7c;
+}
+
+/* all active links */
+.affix ul > li.active > a,
+.affix ul > li.active:hover > a,
+.affix ul > li.active:focus >a {
+  font-weight: 700;
+}
+
+/* nested active links */
+.affix ul ul > li.active > a,
+.affix ul ul > li.active:hover > a,
+.affix ul ul > li.active:focus > a {
+  font-weight: 500;
+}
+
+/* all links */
+.affix ul > li > a {
+  color: #999;
+  border-left: 2px solid transparent;
+  padding: 4px 20px;
+  font-size: 13px;
+  font-weight: 400;
+}
+
+/* nested links */
+.affix ul ul > li > a {
+  padding-top: 1px;
+  padding-bottom: 1px;
+  padding-left: 30px;
+  font-size: 12px;
+}
+
+/* hide inactive nested list */
+.affix ul ul {
+  display: none;
+}
+
+/* show active nested list */
+.affix ul > li.active > ul {
+  display: block;
+}
+
+.affix > ul > li > a:before {
+  content: '';
+}
+
+.affix ul ul > li > a:before {
+  content: '';
+}
+
+/* Style Buttons
+   ------------------------------------------------------- */
+.btn-warning {
+  background-color: #0071c5;
+}
+
+.btn-info {
+  background-color: #0071c5;
+}
+
+/* Navbar Hamburger
+   ------------------------------------------------------- */
+.icon-bar {
+  transition: 0.4s;
+}
+
+/* Rotate first bar */
+.change .icon-bar:nth-of-type(2) {
+  transform: rotate(-45deg) translate(-4px, 5px) ;
+}
+
+/* Fade out the second bar */
+.change .icon-bar:nth-of-type(3) {
+  opacity: 0;
+}
+
+/* Rotate last bar */
+.change .icon-bar:nth-of-type(4) {
+  transform: rotate(45deg) translate(-4px, -5px) ;
+}
+
+/* Custom Navbar
+   ------------------------------------------------------- */
+.navbar-inverse {
+  background-color: #0071c5;
+  opacity: 0.95;
+  border-color: #0071c5;
+}
+.navbar-inverse .navbar-brand {
+  color: #ffffff;
+}
+.navbar-inverse .navbar-brand:hover,
+.navbar-inverse .navbar-brand:focus {
+  color: #ecdbff;
+}
+.navbar-inverse .navbar-text {
+  color: #ffffff;
+}
+.navbar-inverse .navbar-nav > li > a {
+  color: #ffffff;
+}
+.navbar-inverse .navbar-nav > li > a:hover,
+.navbar-inverse .navbar-nav > li > a:focus {
+  color: #ecdbff;
+}
+.navbar-inverse .navbar-nav > .active > a,
+.navbar-inverse .navbar-nav > .active > a:hover,
+.navbar-inverse .navbar-nav > .active > a:focus {
+  color: #ecdbff;
+  background-color: #0071c5;
+}
+.navbar-inverse .navbar-nav > .open > a,
+.navbar-inverse .navbar-nav > .open > a:hover,
+.navbar-inverse .navbar-nav > .open > a:focus {
+  color: #ecdbff;
+  background-color: #0071c5;
+}
+.navbar-inverse .navbar-toggle {
+  border-color: #0071c5;
+}
+.navbar-inverse .navbar-toggle:hover,
+.navbar-inverse .navbar-toggle:focus {
+  background-color: #0071c5;
+}
+.navbar-inverse .navbar-toggle .icon-bar {
+  background-color: #ffffff;
+}
+.navbar-inverse .navbar-collapse,
+.navbar-inverse .navbar-form {
+  border: none;
+}
+.navbar-inverse .navbar-link {
+  color: #ffffff;
+}
+.navbar-inverse .navbar-link:hover {
+  color: #ecdbff;
+}
+.versionarrow {
+  margin-left: 0.8em;
+  margin-top: -1.5em;
+  margin-bottom: -1em;
+  padding: 1em;
+}
+
+.versionarrow::before {
+  position: absolute;
+  content: '';
+  width: 0;
+  height: 0;
+  border: .5em solid transparent;
+  border-left-color: gray;
+  transform-origin: 0 50%;
+  transition: transform .1s;
+  margin-top: 0.2em;
+}
+
+
+.versionarrow.disable {
+  text-decoration: line-through;
+}
+
+.versionarrow.down::before {
+  transform: rotate(90deg);
+  margin-top: 0em;
+  transition: transform .1s;
+}
+
+@media (max-width: 767px) {
+  .navbar-inverse .navbar-nav .open .dropdown-menu > li > a {
+    color: #ffffff;
+  }
+  .navbar-inverse .navbar-nav .open .dropdown-menu > li > a:hover,
+  .navbar-inverse .navbar-nav .open .dropdown-menu > li > a:focus {
+    color: #ecdbff;
+  }
+  .navbar-inverse .navbar-nav .open .dropdown-menu > .active > a,
+  .navbar-inverse .navbar-nav .open .dropdown-menu > .active > a:hover,
+  .navbar-inverse .navbar-nav .open .dropdown-menu > .active > a:focus {
+    color: #ecdbff;
+    background-color: #0071c5;
+  }
+}
+
+.navbar-version-select {
+  padding: 2px;
+  border: none;
+  border-radius: 2px;
+  box-shadow: none;
+  -webkit-appearance: media-time-remaining-display;
+  margin-top: 14px;
+}
\ No newline at end of file
diff --git a/eng/common/docgeneration/templates/matthews/styles/main.js b/eng/common/docgeneration/templates/matthews/styles/main.js
new file mode 100644
index 000000000000..7ab1dd9e28ff
--- /dev/null
+++ b/eng/common/docgeneration/templates/matthews/styles/main.js
@@ -0,0 +1,238 @@
+// Use container fluid
+var containers = $(".container");
+containers.removeClass("container");
+containers.addClass("container-fluid");
+
+WINDOW_CONTENTS = window.location.href.split('/')
+var SELECTED_LANGUAGE = ''
+var PACKAGE_REGEX = ''
+var PACKAGE_REPLACEMENT = ''
+
+ATTR1 = '[System.ComponentModel.EditorBrowsable]\n<'
+
+// Navbar Hamburger
+$(function () {
+  $(".navbar-toggle").click(function () {
+    $(this).toggleClass("change");
+  })
+})
+
+// Select list to replace affix on small screens
+$(function () {
+  var navItems = $(".sideaffix .level1 > li");
+
+  if (navItems.length == 0) {
+    return;
+  }
+
+  var selector = $("