diff --git a/build_package.py b/build_package.py index b8d964826792..5c998ebcc182 100644 --- a/build_package.py +++ b/build_package.py @@ -10,8 +10,8 @@ import os import glob import sys -from subprocess import check_call +from subprocess import check_call DEFAULT_DEST_FOLDER = "./dist" diff --git a/eng/pipelines/templates/jobs/archetype-sdk-client.yml b/eng/pipelines/templates/jobs/archetype-sdk-client.yml index b4751dca6575..25100405e343 100644 --- a/eng/pipelines/templates/jobs/archetype-sdk-client.yml +++ b/eng/pipelines/templates/jobs/archetype-sdk-client.yml @@ -211,7 +211,7 @@ jobs: BeforeTestSteps: - task: DownloadPipelineArtifact@0 inputs: - artifactName: 'artifacts' + artifactName: 'packages' targetPath: $(Build.ArtifactStagingDirectory) - template: ../steps/set-dev-build.yml diff --git a/eng/pipelines/templates/stages/archetype-python-release.yml b/eng/pipelines/templates/stages/archetype-python-release.yml index d403445304a2..7540e4bba413 100644 --- a/eng/pipelines/templates/stages/archetype-python-release.yml +++ b/eng/pipelines/templates/stages/archetype-python-release.yml @@ -31,6 +31,7 @@ stages: deploy: steps: - checkout: self + - ${{if eq(parameters.TestPipeline, 'true')}}: - task: PowerShell@2 displayName: Prep template pipeline for release @@ -40,24 +41,26 @@ stages: workingDirectory: $(Build.SourcesDirectory) filePath: eng/scripts/SetTestPipelineVersion.ps1 arguments: '-BuildID $(Build.BuildId)' + - ${{if ne(artifact.skipVerifyChangeLog, 'true')}}: - template: /eng/common/pipelines/templates/steps/verify-changelog.yml parameters: PackageName: ${{artifact.name}} ServiceName: ${{parameters.ServiceDirectory}} ForRelease: true - - template: /eng/pipelines/templates/steps/stage-filtered-artifacts.yml - parameters: - SourceFolder: ${{parameters.ArtifactName}} - TargetFolder: ${{artifact.safeName}} - PackageName: ${{artifact.name}} + - pwsh: | - Get-ChildItem -Recurse $(Pipeline.Workspace)/${{artifact.safeName}} + $packageDirectory = 
"${{artifact.name}}".Replace("_", "-") + echo "##vso[task.setvariable variable=Package.Name]$packageDirectory" + + - pwsh: | + Get-ChildItem -Recurse $(Pipeline.Workspace)/${{parameters.ArtifactName}}/$(Package.Name) workingDirectory: $(Pipeline.Workspace) displayName: Output Visible Artifacts + - template: /eng/common/pipelines/templates/steps/create-tags-and-git-release.yml parameters: - ArtifactLocation: $(Pipeline.Workspace)/${{artifact.safeName}} + ArtifactLocation: $(Pipeline.Workspace)/${{parameters.ArtifactName}}/$(Package.Name) PackageRepository: PyPI ReleaseSha: $(Build.SourceVersion) RepoId: Azure/azure-sdk-for-python @@ -83,12 +86,6 @@ stages: artifact: ${{parameters.ArtifactName}} timeoutInMinutes: 5 - - template: /eng/pipelines/templates/steps/stage-filtered-artifacts.yml - parameters: - SourceFolder: ${{parameters.ArtifactName}} - TargetFolder: ${{artifact.safeName}} - PackageName: ${{artifact.name}} - - task: UsePythonVersion@0 - script: | @@ -96,6 +93,10 @@ stages: pip install twine readme-renderer[md] displayName: Install Twine + - pwsh: | + $packageDirectory = "${{artifact.name}}".Replace("_", "-") + echo "##vso[task.setvariable variable=Package.Name]$packageDirectory" + - task: TwineAuthenticate@1 displayName: 'Authenticate to registry: pypi.org' inputs: @@ -108,17 +109,17 @@ stages: - script: | set -e - twine upload --repository 'pypi' --config-file $(PYPIRC_PATH) $(Pipeline.Workspace)/${{artifact.safeName}}/*.whl + twine upload --repository 'pypi' --config-file $(PYPIRC_PATH) $(Pipeline.Workspace)/${{parameters.ArtifactName}}/$(Package.Name)/*.whl echo "Uploaded whl to pypi" - twine upload --repository 'pypi' --config-file $(PYPIRC_PATH) $(Pipeline.Workspace)/${{artifact.safeName}}/*.zip + twine upload --repository 'pypi' --config-file $(PYPIRC_PATH) $(Pipeline.Workspace)/${{parameters.ArtifactName}}/$(Package.Name)/*.zip echo "Uploaded zip to pypi" displayName: 'Publish package to registry: pypi.org' - script: | set -e - twine upload 
--repository ${{parameters.DevFeedName}} --config-file $(PYPIRC_PATH) $(Pipeline.Workspace)/${{artifact.safeName}}/*.whl + twine upload --repository ${{parameters.DevFeedName}} --config-file $(PYPIRC_PATH) $(Pipeline.Workspace)/${{parameters.ArtifactName}}/$(Package.Name)/*.whl echo "Uploaded whl to devops feed" - twine upload --repository ${{parameters.DevFeedName}} --config-file $(PYPIRC_PATH) $(Pipeline.Workspace)/${{artifact.safeName}}/*.zip + twine upload --repository ${{parameters.DevFeedName}} --config-file $(PYPIRC_PATH) $(Pipeline.Workspace)/${{parameters.ArtifactName}}/$(Package.Name)/*.zip echo "Uploaded sdist to devops feed" displayName: 'Publish package to feed: ${{parameters.DevFeedName}}' @@ -138,23 +139,23 @@ stages: deploy: steps: - checkout: self - - template: /eng/pipelines/templates/steps/stage-filtered-artifacts.yml - parameters: - SourceFolder: ${{parameters.DocArtifact}} - TargetFolder: ${{artifact.safeName}} - PackageName: ${{artifact.name}} - AdditionalRegex: '.zip' + + - pwsh: | + $packageDirectory = "${{artifact.name}}".Replace("_", "-") + echo "##vso[task.setvariable variable=Package.Name]$packageDirectory" + - pwsh: | - Get-ChildItem -Recurse $(Pipeline.Workspace)/${{artifact.safeName}} + Get-ChildItem -Recurse $(Pipeline.Workspace)/${{parameters.DocArtifact}}/$(Package.Name) workingDirectory: $(Pipeline.Workspace) displayName: Output Visible Artifacts + - template: /eng/common/pipelines/templates/steps/publish-blobs.yml parameters: - FolderForUpload: '$(Pipeline.Workspace)/${{artifact.safeName}}' + FolderForUpload: '$(Pipeline.Workspace)/${{parameters.DocArtifact}}/$(Package.Name)' BlobSASKey: '$(azure-sdk-docs-prod-sas)' BlobName: '$(azure-sdk-docs-prod-blob-name)' TargetLanguage: 'python' - ArtifactLocation: '$(Pipeline.Workspace)/${{parameters.ArtifactName}}' + ArtifactLocation: '$(Pipeline.Workspace)/${{parameters.ArtifactName}}/$(Package.Name)' # we override the regular script path because we have cloned the build tools repo as a 
separate artifact. ScriptPath: 'eng/common/scripts/copy-docs-to-blobstorage.ps1' @@ -177,22 +178,24 @@ stages: deploy: steps: - checkout: self - - template: /eng/pipelines/templates/steps/stage-filtered-artifacts.yml - parameters: - SourceFolder: ${{parameters.ArtifactName}} - TargetFolder: ${{artifact.safeName}} - PackageName: ${{artifact.name}} + + - pwsh: | + $packageDirectory = "${{artifact.name}}".Replace("_", "-") + echo "##vso[task.setvariable variable=Package.Name]$packageDirectory" + - pwsh: | - Get-ChildItem -Recurse $(Pipeline.Workspace)/${{artifact.safeName}} + Get-ChildItem -Recurse $(Pipeline.Workspace)/${{parameters.ArtifactName}}/$(Package.Name) workingDirectory: $(Pipeline.Workspace) displayName: Output Visible Artifacts + - template: /eng/common/pipelines/templates/steps/get-pr-owners.yml parameters: TargetVariable: "OwningGHUser" ServiceDirectory: ${{parameters.ServiceDirectory}} + - template: /eng/common/pipelines/templates/steps/docs-metadata-release.yml parameters: - ArtifactLocation: $(Pipeline.Workspace)/${{artifact.safeName}} + ArtifactLocation: $(Pipeline.Workspace)/${{parameters.ArtifactName}}/$(Package.Name) PackageRepository: PyPI ReleaseSha: $(Build.SourceVersion) RepoId: Azure/azure-sdk-for-python @@ -280,22 +283,17 @@ stages: - ${{ each artifact in parameters.Artifacts }}: - ${{if ne(artifact.skipPublishDevFeed, 'true')}}: - pwsh: | - Get-ChildItem $(Pipeline.Workspace)/${{parameters.ArtifactName}} - New-Item -Type Directory -Name ${{artifact.safeName}} -Path $(Pipeline.Workspace) - $underscorePrefix = "${{artifact.name}}" - $dashPrefix = "${{artifact.name}}".Replace("_", "-") - Copy-Item $(Pipeline.Workspace)/${{parameters.ArtifactName}}/$dashPrefix-[0-9]*.[0-9]*.[0-9]*a[0-9]* $(Pipeline.Workspace)/${{artifact.safeName}} - Copy-Item $(Pipeline.Workspace)/${{parameters.ArtifactName}}/$underscorePrefix-[0-9]*.[0-9]*.[0-9]*a[0-9]* $(Pipeline.Workspace)/${{artifact.safeName}} - Get-ChildItem $(Pipeline.Workspace)/${{artifact.safeName}} 
- - $fileCount = (Get-ChildItem $(Pipeline.Workspace)/${{artifact.safeName}} | Measure-Object).Count + $packageDirectory = "${{artifact.name}}".Replace("_", "-") + echo "##vso[task.setvariable variable=Package.Name]$packageDirectory" + - pwsh: | + $fileCount = (Get-ChildItem $(Pipeline.Workspace)/${{parameters.ArtifactName}}/$(Package.Name) | Measure-Object).Count if ($fileCount -eq 0) { Write-Host "No alpha packages for ${{artifact.safeName}} to publish." exit 0 } - twine upload --repository $(DevFeedName) --config-file $(PYPIRC_PATH) $(Pipeline.Workspace)/${{artifact.safeName}}/*a*.whl + twine upload --repository $(DevFeedName) --config-file $(PYPIRC_PATH) $(Pipeline.Workspace)/${{parameters.ArtifactName}}/$(Package.Name)/*a*.whl echo "Uploaded whl to devops feed $(DevFeedName)" - twine upload --repository $(DevFeedName) --config-file $(PYPIRC_PATH) $(Pipeline.Workspace)/${{artifact.safeName}}/*a*.zip + twine upload --repository $(DevFeedName) --config-file $(PYPIRC_PATH) $(Pipeline.Workspace)/${{parameters.ArtifactName}}/$(Package.Name)/*a*.zip echo "Uploaded sdist to devops feed $(DevFeedName)" displayName: 'Publish ${{artifact.name}} alpha package' diff --git a/eng/pipelines/templates/steps/analyze.yml b/eng/pipelines/templates/steps/analyze.yml index 5c0c7b43d8d2..163c85fa686c 100644 --- a/eng/pipelines/templates/steps/analyze.yml +++ b/eng/pipelines/templates/steps/analyze.yml @@ -88,7 +88,7 @@ steps: - task: DownloadPipelineArtifact@0 condition: and(succeededOrFailed(), ne(variables['Skip.ApiStubGen'],'true')) inputs: - artifactName: 'artifacts' + artifactName: 'packages' targetPath: $(Build.ArtifactStagingDirectory) - template: ../steps/run_apistub.yml diff --git a/eng/pipelines/templates/steps/build-artifacts.yml b/eng/pipelines/templates/steps/build-artifacts.yml index 7da0788bd79c..a816abeb96a6 100644 --- a/eng/pipelines/templates/steps/build-artifacts.yml +++ b/eng/pipelines/templates/steps/build-artifacts.yml @@ -1,9 +1,19 @@ parameters: - 
BeforePublishSteps: [] - TestPipeline: false - BuildTargetingString: 'azure-*' - ServiceDirectory: '' - BuildDocs: true + - name: BeforePublishSteps + type: object + default: [] + - name: TestPipeline + type: boolean + default: false + - name: BuildTargetingString + type: string + default: 'azure-*' + - name: ServiceDirectory + type: string + default: '' + - name: BuildDocs + type: boolean + default: true steps: - ${{if eq(parameters.TestPipeline, 'true')}}: @@ -57,8 +67,8 @@ steps: arguments: '-d "$(Build.ArtifactStagingDirectory)" "${{ parameters.BuildTargetingString }}" --service=${{parameters.ServiceDirectory}} --devbuild="$(SetDevVersion)"' - script: | - twine check $(Build.ArtifactStagingDirectory)/*.whl - twine check $(Build.ArtifactStagingDirectory)/*.zip + twine check $(Build.ArtifactStagingDirectory)/**/*.whl + twine check $(Build.ArtifactStagingDirectory)/**/*.zip displayName: 'Verify Readme' - task: PythonScript@0 @@ -73,17 +83,9 @@ steps: - ${{ parameters.BeforePublishSteps }} - - task: PublishPipelineArtifact@0 - inputs: - artifactName: 'artifacts' - targetPath: $(Build.ArtifactStagingDirectory) - - # Duplicating the task above to introduce a packages artifact for consistency - # with the other pipelines. Also using the newer YAML shortcut. Once we get - # past release successfully with unified pipelines we'll look at getting rid - # of the duplicated "artifacts" artifact. 
- publish: $(Build.ArtifactStagingDirectory) artifact: packages + condition: succeededOrFailed() - task: PublishBuildArtifacts@1 condition: and(succeededOrFailed(), ${{parameters.BuildDocs}}) diff --git a/eng/pipelines/templates/steps/test_regression.yml b/eng/pipelines/templates/steps/test_regression.yml index 23b8ad2c9360..9109fa766520 100644 --- a/eng/pipelines/templates/steps/test_regression.yml +++ b/eng/pipelines/templates/steps/test_regression.yml @@ -12,7 +12,7 @@ steps: - task: DownloadPipelineArtifact@0 inputs: - artifactName: 'artifacts' + artifactName: 'packages' targetPath: $(Build.ArtifactStagingDirectory) - script: | diff --git a/eng/tox/run_sphinx_build.py b/eng/tox/run_sphinx_build.py index 369a32490e3e..61293779d5fa 100644 --- a/eng/tox/run_sphinx_build.py +++ b/eng/tox/run_sphinx_build.py @@ -35,7 +35,7 @@ def move_output_and_zip(target_dir, package_dir, package_name): if not os.path.exists(ci_doc_dir): os.mkdir(ci_doc_dir) - individual_zip_location = os.path.join(ci_doc_dir, package_name) + individual_zip_location = os.path.join(ci_doc_dir, package_name, package_name) shutil.make_archive(individual_zip_location, 'zip', target_dir) def sphinx_build(target_dir, output_dir): diff --git a/eng/tox/tox_helper_tasks.py b/eng/tox/tox_helper_tasks.py index fe4e07bf97bf..b9b43a293cc7 100644 --- a/eng/tox/tox_helper_tasks.py +++ b/eng/tox/tox_helper_tasks.py @@ -17,6 +17,7 @@ import io import glob import zipfile +import fnmatch logging.getLogger().setLevel(logging.INFO) @@ -92,7 +93,13 @@ def find_sdist(dist_dir, pkg_name, pkg_version): return pkg_name_format = "{0}-{1}.zip".format(pkg_name, pkg_version) - packages = [os.path.basename(w) for w in glob.glob(os.path.join(dist_dir, pkg_name_format))] + packages = [] + for root, dirnames, filenames in os.walk(dist_dir): + for filename in fnmatch.filter(filenames, pkg_name_format): + packages.append(os.path.join(root, filename)) + + packages = [os.path.relpath(w, dist_dir) for w in packages] + if not packages: 
logging.error("No sdist is found in directory %s with package name format %s", dist_dir, pkg_name_format) return @@ -109,8 +116,15 @@ def find_whl(whl_dir, pkg_name, pkg_version): logging.error("Package name cannot be empty to find whl") return - pkg_name_format = "{0}-{1}-*.whl".format(pkg_name.replace("-", "_"), pkg_version) - whls = [os.path.basename(w) for w in glob.glob(os.path.join(whl_dir, pkg_name_format))] + + pkg_name_format = "{0}-{1}*.whl".format(pkg_name.replace("-", "_"), pkg_version) + whls = [] + for root, dirnames, filenames in os.walk(whl_dir): + for filename in fnmatch.filter(filenames, pkg_name_format): + whls.append(os.path.join(root, filename)) + + whls = [os.path.relpath(w, whl_dir) for w in whls] + if not whls: logging.error("No whl is found in directory %s with package name format %s", whl_dir, pkg_name_format) logging.info("List of whls in directory: %s", glob.glob(os.path.join(whl_dir, "*.whl"))) @@ -136,6 +150,3 @@ def find_whl(whl_dir, pkg_name, pkg_version): return whls[0] else: return None - - - diff --git a/scripts/devops_tasks/build_packages.py b/scripts/devops_tasks/build_packages.py index cc85c0df6f14..3b515f31f6c3 100644 --- a/scripts/devops_tasks/build_packages.py +++ b/scripts/devops_tasks/build_packages.py @@ -25,7 +25,7 @@ def build_packages(targeted_packages, distribution_directory, is_dev_build=False): # run the build and distribution for package_root in targeted_packages: - print(package_root) + service_hierarchy = os.path.join(os.path.basename(package_root)) if is_dev_build: verify_update_package_requirement(package_root) print("Generating Package Using Python {}".format(sys.version)) @@ -34,7 +34,7 @@ def build_packages(targeted_packages, distribution_directory, is_dev_build=False sys.executable, build_packing_script_location, "--dest", - distribution_directory, + os.path.join(distribution_directory, service_hierarchy), package_root, ], root_dir, diff --git a/scripts/devops_tasks/common_tasks.py 
b/scripts/devops_tasks/common_tasks.py index 317ae2026230..1ec5d86cdbf3 100644 --- a/scripts/devops_tasks/common_tasks.py +++ b/scripts/devops_tasks/common_tasks.py @@ -19,7 +19,7 @@ import textwrap import io import re -import pdb +import fnmatch # Assumes the presence of setuptools from pkg_resources import parse_version, parse_requirements, Requirement, WorkingSet, working_set @@ -356,9 +356,15 @@ def find_whl(package_name, version, whl_directory): parsed_version = parse(version) logging.info("Searching whl for package {0}-{1}".format(package_name, parsed_version.base_version)) - whl_name = "{0}-{1}*.whl".format(package_name.replace("-", "_"), parsed_version.base_version) - paths = glob.glob(os.path.join(whl_directory, whl_name)) - if not paths: + whl_name_format = "{0}-{1}*.whl".format(package_name.replace("-", "_"), parsed_version.base_version) + whls = [] + for root, dirnames, filenames in os.walk(whl_directory): + for filename in fnmatch.filter(filenames, whl_name_format): + whls.append(os.path.join(root, filename)) + + whls = [os.path.relpath(w, whl_directory) for w in whls] + + if not whls: logging.error( "whl is not found in whl directory {0} for package {1}-{2}".format( whl_directory, package_name, parsed_version.base_version @@ -366,7 +372,7 @@ def find_whl(package_name, version, whl_directory): ) exit(1) - return paths[0] + return whls[0] # This method installs package from a pre-built whl def install_package_from_whl( diff --git a/scripts/devops_tasks/test_regression.py b/scripts/devops_tasks/test_regression.py index 044de110532d..6f3e240addb9 100644 --- a/scripts/devops_tasks/test_regression.py +++ b/scripts/devops_tasks/test_regression.py @@ -109,7 +109,8 @@ def run(self): pkg_name = self.context.package_name if pkg_name in self.package_dependency_dict: logging.info("Running regression test for {}".format(pkg_name)) - self.whl_path = find_whl(pkg_name, self.context.pkg_version, self.context.whl_directory) + + self.whl_path = 
os.path.join(self.context.whl_directory, find_whl(pkg_name, self.context.pkg_version, self.context.whl_directory)) if find_packages_missing_on_pypi(self.whl_path): logging.error("Required packages are not available on PyPI. Skipping regression test") exit(0) @@ -169,21 +170,30 @@ def _run_test(self, dep_pkg_path): dep_pkg_path ) - # Install pre-built whl for current package + # Install pre-built whl for current package. install_package_from_whl( self.whl_path, self.context.temp_path, self.context.venv.python_executable, ) - # install package to be tested and run pytest + + # install dependent package from source + self._install_packages(dep_pkg_path, self.context.package_name) + + # try install of pre-built whl for current package again. if unnecessary, pip does nothing. + # we do this to ensure that the correct development version is installed. on non-dev builds + # this step will just skip through. + install_package_from_whl( + self.whl_path, + self.context.temp_path, + self.context.venv.python_executable, + ) + self._execute_test(dep_pkg_path) finally: self.context.deinitialize(dep_pkg_path) def _execute_test(self, dep_pkg_path): - # install dependent package from source - self._install_packages(dep_pkg_path, self.context.package_name) - # Ensure correct version of package is installed if not self._is_package_installed(self.context.package_name, self.context.pkg_version): logging.error("Incorrect version of package {0} is installed. 
Expected version {1}".format(self.context.package_name, self.context.pkg_version)) diff --git a/scripts/devops_tasks/tox_harness.py b/scripts/devops_tasks/tox_harness.py index 6050d00c57dc..af97a9f2b6dc 100644 --- a/scripts/devops_tasks/tox_harness.py +++ b/scripts/devops_tasks/tox_harness.py @@ -243,7 +243,7 @@ def build_whl_for_req(req, package_path): logging.info("Building wheel for package {}".format(pkg_name)) run_check_call([sys.executable, "setup.py", "bdist_wheel", "-d", temp_dir], req_pkg_path) - whl_path = find_whl(pkg_name, version, temp_dir) + whl_path = os.path.join(temp_dir, find_whl(pkg_name, version, temp_dir)) logging.info("Wheel for package {0} is {1}".format(pkg_name, whl_path)) logging.info("Replacing dev requirement. Old requirement:{0}, New requirement:{1}".format(req, whl_path)) return whl_path @@ -265,6 +265,7 @@ def replace_dev_reqs(file, pkg_root): req_file_name = os.path.basename(file) logging.info("Old {0}:{1}".format(req_file_name, adjusted_req_lines)) + adjusted_req_lines = list(map(lambda x: build_whl_for_req(x, pkg_root), adjusted_req_lines)) logging.info("New {0}:{1}".format(req_file_name, adjusted_req_lines))