diff --git a/.azure_pipelines/job_templates/olive-build-doc-template.yaml b/.azure_pipelines/job_templates/olive-build-doc-template.yaml index 4ff511f12..81bd87668 100644 --- a/.azure_pipelines/job_templates/olive-build-doc-template.yaml +++ b/.azure_pipelines/job_templates/olive-build-doc-template.yaml @@ -18,7 +18,7 @@ jobs: versionSpec: 3.8 displayName: Use Python 3.8 - # checout release branch if doc_version provided + # checkout release branch if doc_version provided - script: | git config --global user.email "olivedevteam@microsoft.com" git config --global user.name "olivedevteam" diff --git a/.azure_pipelines/job_templates/olive-build-template.yaml b/.azure_pipelines/job_templates/olive-build-template.yaml index ac4e8fc10..919df650e 100644 --- a/.azure_pipelines/job_templates/olive-build-template.yaml +++ b/.azure_pipelines/job_templates/olive-build-template.yaml @@ -3,71 +3,72 @@ parameters: name: '' pool: '' - gpu: False + device: 'cpu' jobs: -- job: ${{parameters.name}}_Test_Olive - timeoutInMinutes: 300 - condition: eq('${{ parameters.gpu }}', 'False') - pool: - name: ${{ parameters.pool}} - variables: - WINDOWS: ${{ parameters.windows}} - runCodesignValidationInjection: false - - steps: - - task: UsePythonVersion@0 - inputs: - versionSpec: 3.8 - displayName: Use Python 3.8 - - - script: make install-olive PIPELINE=True INSTALL_EXTRAS=[cpu] - displayName: Install Olive - - - task: AzureCLI@1 - inputs: - azureSubscription: $(OLIVE_RG_SERVICE_CONNECTION) - scriptLocation: 'inlineScript' - inlineScript: make test - displayName: Test Olive - env: - OLIVEWHEELS_STORAGE_CONNECTION_STRING: $(olive-wheels-storage-connection-string) - WORKSPACE_SUBSCRIPTION_ID: $(workspace-subscription-id) - WORKSPACE_RESOURCE_GROUP: $(workspace-resource-group) - WORKSPACE_NAME: $(workspace-name) - AZURE_TENANT_ID: $(azure-tenant-id) - AZURE_CLIENT_ID: $(olive-rg-sp-id) - AZURE_CLIENT_SECRET: $(olive-rg-sp-secret) - PIPELINE: True - - - task: CredScan@3 - displayName: 'Run CredScan' - inputs: - debugMode: false - continueOnError: true - - - task: ComponentGovernanceComponentDetection@0 - inputs: - scanType: 'Register' - verbosity: 'Verbose' - alertWarningLevel: 'High' - displayName: Component Detection - - - task: PublishTestResults@2 - condition: succeededOrFailed() - inputs: - testResultsFiles: '**/*TestOlive*.xml' - testRunTitle: '$(Build.BuildNumber)[$(Agent.JobName)]' - displayName: Upload pipeline run test results - - - task: PublishCodeCoverageResults@1 - inputs: - codeCoverageTool: 'Cobertura' - summaryFileLocation: '$(System.DefaultWorkingDirectory)/**/coverage.xml' - - - script: make clean WINDOWS=$(WINDOWS) - condition: always() - displayName: Clean remaining artifacts +- ${{ if eq(parameters.device, 'cpu') }}: + - job: ${{parameters.name}}_Test_Olive + timeoutInMinutes: 300 + pool: + name: ${{ parameters.pool}} + variables: + WINDOWS: ${{ parameters.windows}} + runCodesignValidationInjection: false + device: ${{ parameters.device }} + + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: 3.8 + displayName: Use Python 3.8 + + - script: make install-olive PIPELINE=True INSTALL_EXTRAS=[$(device)] + displayName: Install Olive + + - task: AzureCLI@1 + inputs: + azureSubscription: $(OLIVE_RG_SERVICE_CONNECTION) + scriptLocation: 'inlineScript' + inlineScript: make test + displayName: Test Olive + env: + OLIVEWHEELS_STORAGE_CONNECTION_STRING: $(olive-wheels-storage-connection-string) + WORKSPACE_SUBSCRIPTION_ID: $(workspace-subscription-id) + WORKSPACE_RESOURCE_GROUP: 
$(workspace-resource-group) + WORKSPACE_NAME: $(workspace-name) + AZURE_TENANT_ID: $(azure-tenant-id) + AZURE_CLIENT_ID: $(olive-rg-sp-id) + AZURE_CLIENT_SECRET: $(olive-rg-sp-secret) + PIPELINE: True + + - task: CredScan@3 + displayName: 'Run CredScan' + inputs: + debugMode: false + continueOnError: true + + - task: ComponentGovernanceComponentDetection@0 + inputs: + scanType: 'Register' + verbosity: 'Verbose' + alertWarningLevel: 'High' + displayName: Component Detection + + - task: PublishTestResults@2 + condition: succeededOrFailed() + inputs: + testResultsFiles: '**/*TestOlive*.xml' + testRunTitle: '$(Build.BuildNumber)[$(Agent.JobName)]' + displayName: Upload pipeline run test results + + - task: PublishCodeCoverageResults@1 + inputs: + codeCoverageTool: 'Cobertura' + summaryFileLocation: '$(System.DefaultWorkingDirectory)/**/coverage.xml' + + - script: make clean WINDOWS=$(WINDOWS) + condition: always() + displayName: Clean remaining artifacts - job: ${{parameters.name}}_Test_Examples @@ -80,7 +81,7 @@ jobs: variables: WINDOWS: ${{ parameters.windows }} runCodesignValidationInjection: false - GPU: ${{ parameters.gpu }} + device: ${{ parameters.device }} steps: - task: UsePythonVersion@0 @@ -88,14 +89,14 @@ jobs: versionSpec: 3.8 displayName: Use Python 3.8 - - script: make install-olive PIPELINE=True INSTALL_EXTRAS=[cpu] + - script: make install-olive PIPELINE=True INSTALL_EXTRAS=[$(device)] displayName: Install Olive - task: AzureCLI@1 inputs: azureSubscription: $(OLIVE_RG_SERVICE_CONNECTION) scriptLocation: 'inlineScript' - inlineScript: make test-examples IS_GPU=$(GPU) + inlineScript: make test-examples displayName: Test Examples env: OLIVEWHEELS_STORAGE_CONNECTION_STRING: $(olive-wheels-storage-connection-string) diff --git a/.azure_pipelines/olive-ci.yaml b/.azure_pipelines/olive-ci.yaml index a53366a35..0a9e111c2 100644 --- a/.azure_pipelines/olive-ci.yaml +++ b/.azure_pipelines/olive-ci.yaml @@ -79,7 +79,7 @@ jobs: name: Linux_GPU_CI pool: $(OLIVE_POOL_UBUNTU2004) windows: False - gpu: True + device: gpu examples: bert_cuda_gpu: exampleFolder: bert diff --git a/Makefile b/Makefile index 274917793..b52204dfb 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,6 @@ WINDOWS ?= False PIPELINE ?= False INSTALL_DEV_MODE ?= False -IS_GPU ?= False EXAMPLE_FOLDER ?= EXAMPLE_NAME ?= INSTALL_EXTRAS ?= @@ -44,7 +43,7 @@ test: .PHONY: test-examples test-examples: logs/ test-examples: - $(TEST_EXAMPLES_CMD) $(PIPELINE) $(CURRENT_DIR) $(EXAMPLE_FOLDER) $(EXAMPLE_NAME) $(IS_GPU) + $(TEST_EXAMPLES_CMD) $(PIPELINE) $(CURRENT_DIR) $(EXAMPLE_FOLDER) $(EXAMPLE_NAME) .PHONY: clean clean: diff --git a/examples/bert/README.md b/examples/bert/README.md index c991616c3..ca96fb41c 100644 --- a/examples/bert/README.md +++ b/examples/bert/README.md @@ -74,11 +74,14 @@ This workflow performs BERT optimization on GPU with CUDA/TensorRT. It performs ## How to run ### Pip requirements Install the necessary python packages: -``` -[CPU] +```sh +# [CPU] +pip install git+https://github.com/microsoft/Olive#egg=olive-ai[cpu] +# [GPU] +pip install git+https://github.com/microsoft/Olive#egg=olive-ai[gpu] + +# Other dependencies python -m pip install -r requirements.txt -[GPU] -python -m pip install -r requirements-gpu.txt ``` ### Run sample using config @@ -86,12 +89,12 @@ python -m pip install -r requirements-gpu.txt The optimization techniques to run are specified in the relevant config json file. First, install required packages according to passes. 
-``` +```sh python -m olive.workflows.run --config .json --setup ``` Then, optimize the model -``` +```sh python -m olive.workflows.run --config .json ``` diff --git a/examples/bert/conda.yaml b/examples/bert/conda.yaml index 7bc2923fc..bc940a03a 100644 --- a/examples/bert/conda.yaml +++ b/examples/bert/conda.yaml @@ -5,10 +5,9 @@ dependencies: - python=3.8.13 - pip=22.3.1 - pip: - - onnxruntime - datasets - evaluate - scipy - scikit-learn - transformers - - git+https://github.com/microsoft/Olive.git + - git+https://github.com/microsoft/Olive#egg=olive-ai[cpu] diff --git a/examples/bert/conda_gpu.yaml b/examples/bert/conda_gpu.yaml index 713e03861..84fb11990 100644 --- a/examples/bert/conda_gpu.yaml +++ b/examples/bert/conda_gpu.yaml @@ -5,11 +5,10 @@ dependencies: - python=3.8.13 - pip=22.3.1 - pip: - - onnxruntime-gpu - datasets - evaluate - psutil - scipy - scikit-learn - transformers - - git+https://github.com/microsoft/Olive.git + - git+https://github.com/microsoft/Olive#egg=olive-ai[gpu] diff --git a/examples/bert/requirements-gpu.txt b/examples/bert/requirements-gpu.txt deleted file mode 100644 index 1a31ca527..000000000 --- a/examples/bert/requirements-gpu.txt +++ /dev/null @@ -1,9 +0,0 @@ -azure-ai-ml -azure-identity -datasets -evaluate -docker -onnxruntime-gpu -scipy -scikit-learn -transformers diff --git a/examples/bert/requirements.txt b/examples/bert/requirements.txt index 5585a1709..c170d8ac7 100644 --- a/examples/bert/requirements.txt +++ b/examples/bert/requirements.txt @@ -3,7 +3,6 @@ azure-identity datasets evaluate docker -onnxruntime neural-compressor scipy scikit-learn diff --git a/scripts/test_examples.bat b/scripts/test_examples.bat index 929e88db8..371c10087 100644 --- a/scripts/test_examples.bat +++ b/scripts/test_examples.bat @@ -8,7 +8,6 @@ set PIPELINE=%1 set ROOT_DIR=%2 set EXAMPLE_FOLDER=%3 set EXAMPLE_NAME=%4 -set IS_GPU=%5 if "%PIPELINE%"=="True" ( call olive-venv\\Scripts\\activate.bat || goto :error @@ -24,11 +23,6 @@ call python -m pip install -r %ROOT_DIR%\\examples\\%EXAMPLE_FOLDER%\\requiremen call python -m pytest -v -s --log-cli-level=WARNING --junitxml=%ROOT_DIR%\\logs\\test_examples-TestOlive.xml^ %ROOT_DIR%\\examples\\test\\test_%EXAMPLE_NAME%.py || goto :error -if "%IS_GPU%"=="True" ( - call python -m pip uninstall onnxruntime -y || goto :error - call python -m pip install onnxruntime-gpu || goto :error -) - goto :EOF :error diff --git a/scripts/test_examples.sh b/scripts/test_examples.sh index 6b40cb819..bbff4b7ab 100644 --- a/scripts/test_examples.sh +++ b/scripts/test_examples.sh @@ -9,7 +9,6 @@ PIPELINE=$1 ROOT_DIR=$2 EXAMPLE_FOLDER=$3 EXAMPLE_NAME=$4 -IS_GPU=$5 echo $PIPELINE if [[ "$PIPELINE" == "True" ]]; then @@ -25,10 +24,4 @@ python -m pip install pytest echo "Testing examples" python -m pip install -r $ROOT_DIR/examples/$EXAMPLE_FOLDER/requirements.txt -echo $IS_GPU -if [[ "$IS_GPU" == "True" ]]; then - python -m pip uninstall onnxruntime -y - python -m pip install onnxruntime-gpu -fi - python -m pytest -v -s --log-cli-level=WARNING --junitxml=$ROOT_DIR/logs/test_examples-TestOlive.xml $ROOT_DIR/examples/test/test_$EXAMPLE_NAME.py
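
For context on how the reworked template is meant to be consumed, here is a minimal sketch of a pipeline passing the new `device` parameter (which replaces the old `gpu: True/False` flag and is forwarded to `make install-olive ... INSTALL_EXTRAS=[$(device)]` so the matching `olive-ai[<device>]` extra gets installed). The `template:` reference layout and the CPU job below are illustrative assumptions — only the changed lines of `olive-ci.yaml` are visible in this diff; the GPU job values (`Linux_GPU_CI`, `$(OLIVE_POOL_UBUNTU2004)`, `bert_cuda_gpu`) come from the hunks above.

```yaml
# Sketch only: assumed consumption pattern for olive-build-template.yaml.
jobs:
# CPU job: omitting `device` keeps the default 'cpu', so the
# ${{ if eq(parameters.device, 'cpu') }} guard still emits the Test_Olive job.
- template: job_templates/olive-build-template.yaml
  parameters:
    name: Linux_CPU_CI            # hypothetical job name for illustration
    pool: $(OLIVE_POOL_UBUNTU2004)
    windows: False

# GPU job: device=gpu skips the CPU-only Test_Olive job and installs olive-ai[gpu];
# the separate onnxruntime-gpu swap in test_examples.sh/.bat is no longer needed.
- template: job_templates/olive-build-template.yaml
  parameters:
    name: Linux_GPU_CI
    pool: $(OLIVE_POOL_UBUNTU2004)
    windows: False
    device: gpu
    examples:
      bert_cuda_gpu:
        exampleFolder: bert
```

The design point of the change is that the execution-provider choice now lives in a single `device` value: the pipeline template, the Makefile, and the example environment files (`conda.yaml` vs `conda_gpu.yaml`, `olive-ai[cpu]` vs `olive-ai[gpu]`) all key off it, instead of threading a separate `IS_GPU` flag and a `requirements-gpu.txt` through the test scripts.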