diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 0f2cbc90a7640..bc92024edf42b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -27,7 +27,7 @@ jobs: if: github.repository_owner == 'taichi-dev' runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Save new version run: | @@ -70,66 +70,49 @@ jobs: matrix: ${{ fromJson(needs.matrix_prep.outputs.matrix) }} runs-on: [self-hosted, cuda, vulkan, cn, release] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: 'recursive' + fetch-depth: '0' - - name: Get sccache cache - uses: actions/cache@v2 - with: - path: sccache_cache - key: sccache-linux-gpu-${{ github.sha }} - restore-keys: | - sccache-linux-gpu- + - name: Prepare Environment + run: | + . .github/workflows/scripts/common-utils.sh + prepare-build-cache + echo CI_DOCKER_RUN_EXTRA_ARGS="-v $(pwd):/home/dev/taichi" >> $GITHUB_ENV - name: Build run: | - mkdir -m777 shared - docker create --user dev --name taichi_build --gpus all -v /tmp/.X11-unix:/tmp/.X11-unix \ - -e DISPLAY -e PY -e GPU_BUILD -e TAICHI_CMAKE_ARGS -e PROJECT_NAME \ + . .github/workflows/scripts/common-utils.sh + + ci-docker-run-gpu --name taichi-build \ registry.taichigraphics.com/taichidev-ubuntu18.04:v0.3.4 \ - /home/dev/taichi/.github/workflows/scripts/unix_build.sh - tar -cf - ../${{ github.event.repository.name }} --mode u=+rwx,g=+rwx,o=+rwx --owner 1000 --group 1000 | docker cp - taichi_build:/home/dev/ - docker start -a taichi_build - docker cp taichi_build:/home/dev/taichi/dist shared/dist - docker cp taichi_build:/home/dev/taichi/build shared/build + /home/dev/taichi/.github/workflows/scripts/unix-build.sh env: PY: ${{ matrix.conda_python }} - GPU_BUILD: ON - TAICHI_CMAKE_ARGS: -DTI_WITH_OPENGL:BOOL=ON -DTI_WITH_CC:BOOL=OFF -DTI_WITH_VULKAN:BOOL=ON -DTI_BUILD_TESTS:BOOL=ON -DCMAKE_C_COMPILER_LAUNCHER=sccache -DCMAKE_CXX_COMPILER_LAUNCHER=sccache PROJECT_NAME: ${{ matrix.name }} - DISPLAY: ':1' + TAICHI_CMAKE_ARGS: >- + -DTI_WITH_OPENGL:BOOL=ON + -DTI_WITH_CC:BOOL=OFF + -DTI_WITH_VULKAN:BOOL=ON + -DTI_BUILD_TESTS:BOOL=ON - name: Archive Wheel Artifacts uses: actions/upload-artifact@v3 with: name: dist - path: shared/dist/*.whl + path: dist/*.whl retention-days: 20 - name: Test run: | - docker create --user dev --name taichi_test --gpus all -v /tmp/.X11-unix:/tmp/.X11-unix \ - -e DISPLAY -e PY -e GPU_TEST \ - registry.taichigraphics.com/taichidev-ubuntu18.04:v0.3.4 \ - /home/dev/unix_test.sh - docker cp .github/workflows/scripts/unix_test.sh taichi_test:/home/dev/unix_test.sh - docker cp .github/workflows/scripts/common-utils.sh taichi_test:/home/dev/common-utils.sh - docker cp ./requirements_test.txt taichi_test:/home/dev/requirements_test.txt - docker cp shared/dist/ taichi_test:/home/dev/ - docker cp shared/build/ taichi_test:/home/dev/ - docker cp pyproject.toml taichi_test:/home/dev/ - docker cp tests/ taichi_test:/home/dev/ - docker start -a taichi_test + . 
.github/workflows/scripts/common-utils.sh + + ci-docker-run-gpu --name taichi-test \ + registry.taichigraphics.com/taichidev-ubuntu18.04:v0.3.4 \ + /home/dev/taichi/.github/workflows/scripts/unix_test.sh env: PY: ${{ matrix.conda_python }} - GPU_TEST: ON - DISPLAY: ':1' - - - name: clean docker container - if: always() - run: | - docker rm taichi_build taichi_test -f build_and_test_manylinux2014: name: Build and Upload (manylinux2014) @@ -139,66 +122,52 @@ jobs: matrix: ${{ fromJson(needs.matrix_prep.outputs.matrix) }} runs-on: [self-hosted, cuda, cn, release] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: - submodules: "recursive" + submodules: 'recursive' + fetch-depth: '0' - - name: Get sccache cache - uses: actions/cache@v2 - with: - path: sccache_cache - key: sccache-manylinux-${{ github.sha }} - restore-keys: | - sccache-manylinux- + - name: Prepare Environment + run: | + . .github/workflows/scripts/common-utils.sh + prepare-build-cache + echo CI_DOCKER_RUN_EXTRA_ARGS="-v $(pwd):/home/dev/taichi" >> $GITHUB_ENV - name: Build run: | - mkdir -m777 shared - docker create --user dev --name taichi_build --gpus all \ - -e PY -e GPU_BUILD -e TAICHI_CMAKE_ARGS -e PROJECT_NAME \ - registry.taichigraphics.com/taichidev-manylinux2014-cuda:v0.0.0 \ - /home/dev/taichi/.github/workflows/scripts/unix_build.sh - tar -cf - ../${{ github.event.repository.name }} --mode u=+rwx,g=+rwx,o=+rwx --owner 1000 --group 1000 | docker cp - taichi_build:/home/dev/ - docker start -a taichi_build - docker cp taichi_build:/home/dev/taichi/dist shared/dist - docker cp taichi_build:/home/dev/taichi/build shared/build + . .github/workflows/scripts/common-utils.sh + + ci-docker-run-gpu --name taichi-test \ + registry.taichigraphics.com/taichidev-manylinux2014-cuda:v0.0.0 \ + /home/dev/taichi/.github/workflows/scripts/unix-build.sh + env: PY: ${{ matrix.conda_python }} - GPU_BUILD: ON - TAICHI_CMAKE_ARGS: -DTI_WITH_OPENGL:BOOL=OFF -DTI_WITH_CC:BOOL=OFF -DTI_WITH_VULKAN:BOOL=OFF -DTI_BUILD_TESTS:BOOL=ON -DCMAKE_C_COMPILER_LAUNCHER=sccache -DCMAKE_CXX_COMPILER_LAUNCHER=sccache PROJECT_NAME: ${{ matrix.name }} + TAICHI_CMAKE_ARGS: >- + -DTI_WITH_OPENGL:BOOL=OFF + -DTI_WITH_CC:BOOL=OFF + -DTI_WITH_VULKAN:BOOL=OFF + -DTI_BUILD_TESTS:BOOL=ON - name: Archive Wheel Artifacts uses: actions/upload-artifact@v3 with: name: dist - path: shared/dist/*.whl + path: dist/*.whl retention-days: 20 - name: Test run: | - docker create --user dev --name taichi_test --gpus all -e TI_WANTED_ARCHS \ - -e PY -e GPU_TEST \ - registry.taichigraphics.com/taichidev-manylinux2014-cuda:v0.0.0 \ - /home/dev/unix_test.sh - docker cp .github/workflows/scripts/unix_test.sh taichi_test:/home/dev/unix_test.sh - docker cp .github/workflows/scripts/common-utils.sh taichi_test:/home/dev/common-utils.sh - docker cp ./requirements_test.txt taichi_test:/home/dev/requirements_test.txt - docker cp shared/dist/ taichi_test:/home/dev/ - docker cp shared/build/ taichi_test:/home/dev/ - docker cp pyproject.toml taichi_test:/home/dev/ - docker cp tests/ taichi_test:/home/dev/ - docker start -a taichi_test + . 
.github/workflows/scripts/common-utils.sh + + ci-docker-run-gpu --name taichi-test \ + registry.taichigraphics.com/taichidev-ubuntu18.04:v0.3.4 \ + /home/dev/taichi/.github/workflows/scripts/unix_test.sh env: PY: ${{ matrix.conda_python }} - GPU_TEST: ON TI_WANTED_ARCHS: "cuda,cpu" - - name: clean docker container - if: always() - run: | - docker rm taichi_build taichi_test -f - build_and_test_mac: name: Build and Upload (macOS only) needs: matrix_prep @@ -209,17 +178,15 @@ jobs: env: PY: ${{ matrix.python }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: 'recursive' + fetch-depth: '0' - - name: Get sccache cache - uses: actions/cache@v2 - with: - path: sccache_cache - key: sccache-mac-${{ github.sha }} - restore-keys: | - sccache-mac- + - name: Prepare Environment + run: | + . .github/workflows/scripts/common-utils.sh + prepare-build-cache - name: Download Pre-Built LLVM 10.0.0 run: python misc/ci_download.py @@ -230,14 +197,15 @@ jobs: run: | brew install molten-vk export PATH=$(pwd)/taichi-llvm/bin/:$PATH - export PATH=$(ls -d ~/mini*/envs/$PY/bin):$PATH - bash .github/workflows/scripts/unix_build.sh - brew uninstall molten-vk - echo PATH=$PATH >> $GITHUB_ENV + .github/workflows/scripts/unix-build.sh env: - TAICHI_CMAKE_ARGS: -DTI_WITH_VULKAN:BOOL=ON -DTI_WITH_OPENGL:BOOL=OFF -DTI_WITH_CC:BOOL=OFF -DTI_BUILD_TESTS:BOOL=ON -DCMAKE_C_COMPILER_LAUNCHER=sccache -DCMAKE_CXX_COMPILER_LAUNCHER=sccache PROJECT_NAME: ${{ matrix.name }} CXX: clang++ + TAICHI_CMAKE_ARGS: >- + -DTI_WITH_VULKAN:BOOL=ON + -DTI_WITH_OPENGL:BOOL=OFF + -DTI_WITH_CC:BOOL=OFF + -DTI_BUILD_TESTS:BOOL=ON - name: Archive Wheel Artifacts uses: actions/upload-artifact@v3 @@ -263,32 +231,30 @@ jobs: run: shell: '/usr/bin/arch -arch arm64e /bin/bash --noprofile --norc -eo pipefail {0}' steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: 'recursive' + fetch-depth: '0' - - name: Get sccache cache - uses: actions/cache@v2 - with: - path: sccache_cache - key: sccache-m1-${{ github.sha }} - restore-keys: | - sccache-m1- + - name: Prepare Environment + run: | + . .github/workflows/scripts/common-utils.sh + prepare-build-cache - name: Build run: | brew install molten-vk - # We hacked here because conda activate in CI won't update python PATH - # automatically. So we don't activate and use desired python version - # directly. 
- export PATH=/Users/github/miniforge3/envs/$PYTHON/bin:$PATH - bash .github/workflows/scripts/unix_build.sh - brew uninstall molten-vk + .github/workflows/scripts/unix-build.sh env: - TAICHI_CMAKE_ARGS: -DTI_WITH_OPENGL:BOOL=OFF -DTI_WITH_CUDA:BOOL=OFF -DTI_WITH_CC:BOOL=OFF -DTI_WITH_VULKAN:BOOL=ON -DTI_BUILD_TESTS:BOOL=ON -DCMAKE_C_COMPILER_LAUNCHER=sccache -DCMAKE_CXX_COMPILER_LAUNCHER=sccache PROJECT_NAME: ${{ matrix.name }} - PYTHON: ${{ matrix.python }} + PY: ${{ matrix.python }} CXX: clang++ + TAICHI_CMAKE_ARGS: >- + -DTI_WITH_OPENGL:BOOL=OFF + -DTI_WITH_CUDA:BOOL=OFF + -DTI_WITH_CC:BOOL=OFF + -DTI_WITH_VULKAN:BOOL=ON + -DTI_BUILD_TESTS:BOOL=ON - name: Archive Wheel Artifacts uses: actions/upload-artifact@v3 @@ -299,11 +265,10 @@ jobs: - name: Test run: | - export PATH=/Users/github/miniforge3/envs/$PYTHON/bin:$PATH .github/workflows/scripts/unix_test.sh env: TI_WANTED_ARCHS: 'metal,vulkan,cpu' - PYTHON: ${{ matrix.python }} + PY: ${{ matrix.python }} GPU_TEST: ON build_and_test_macos_1014: @@ -314,32 +279,32 @@ jobs: matrix: ${{ fromJson(needs.matrix_prep.outputs.matrix) }} runs-on: [self-hosted, macos10.14] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: 'recursive' + fetch-depth: '0' - - name: Get sccache cache - uses: actions/cache@v2 - with: - path: sccache_cache - key: sccache-1014-${{ github.sha }} - restore-keys: | - sccache-1014- + - name: Prepare Environment + run: | + . .github/workflows/scripts/common-utils.sh + prepare-build-cache - name: Build run: | - # We hacked here because conda activate in CI won't update python PATH - # automatically. So we don't activate and use desired python version - # directly. - export PATH=/Users/buildbot6/miniconda3/envs/$PYTHON/bin:$PATH + # TODO: should embed LLVM as submodule export LLVM_DIR=/Users/buildbot6/taichi-llvm-10.0.0-macos export PATH=$LLVM_DIR/bin:$PATH - bash .github/workflows/scripts/unix_build.sh + .github/workflows/scripts/unix-build.sh env: - TAICHI_CMAKE_ARGS: -DTI_WITH_OPENGL:BOOL=OFF -DTI_WITH_CUDA:BOOL=OFF -DTI_WITH_CC:BOOL=OFF -DTI_WITH_VULKAN:BOOL=OFF -DTI_BUILD_TESTS:BOOL=ON -DCMAKE_C_COMPILER_LAUNCHER=sccache -DCMAKE_CXX_COMPILER_LAUNCHER=sccache PROJECT_NAME: ${{ matrix.name }} - PYTHON: ${{ matrix.python }} + PY: ${{ matrix.python }} CXX: clang++ + TAICHI_CMAKE_ARGS: >- + -DTI_WITH_OPENGL:BOOL=OFF + -DTI_WITH_CUDA:BOOL=OFF + -DTI_WITH_CC:BOOL=OFF + -DTI_WITH_VULKAN:BOOL=OFF + -DTI_BUILD_TESTS:BOOL=ON - name: Archive Wheel Artifacts uses: actions/upload-artifact@v3 @@ -350,11 +315,10 @@ jobs: - name: Test run: | - export PATH=/Users/buildbot6/miniconda3/envs/$PYTHON/bin:$PATH .github/workflows/scripts/unix_test.sh env: TI_WANTED_ARCHS: 'cpu' - PYTHON: ${{ matrix.python }} + PY: ${{ matrix.python }} build_and_test_windows: name: Build and Upload (Windows only) @@ -362,35 +326,24 @@ jobs: strategy: fail-fast: false matrix: ${{ fromJson(needs.matrix_prep.outputs.matrix) }} - runs-on: windows-latest + runs-on: [self-hosted, windows, cuda, OpenGL] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: 'recursive' + fetch-depth: '0' - uses: actions/setup-python@v2 with: python-version: ${{ matrix.python }} - - name: Add Visual Studio Shell to ENV - uses: egor-tensin/vs-shell@v2 - with: - arch: x64 - - - name: Get sccache cache - uses: actions/cache@v2 - with: - path: ccache_cache - key: ccache-win64-clang-${{ github.sha }} - restore-keys: | - ccache-win64-clang- - - - name: Build Python Wheel - shell: powershell + - name: Build + shell: pwsh + if: ${{ 
needs.check_files.outputs.run_job != 'false' }} run: | - .\.github\workflows\scripts\win_build.ps1 -installVulkan -libsDir C:\ - venv\Scripts\python -m pip install $(dir dist\*.whl) + .\.github\workflows\scripts\win_build.ps1 -llvmVer 10 -installVulkan -libsDir "$env:LocalAppData/buildbot" env: + PY: ${{ matrix.python }} PROJECT_NAME: ${{ matrix.name }} - name: Archive Wheel Artifacts @@ -401,16 +354,12 @@ jobs: retention-days: 20 - name: Test - shell: powershell + shell: pwsh run: | - $env:PATH = ";C:\taichi_llvm\bin;C:\taichi_clang\bin;" + $env:PATH - . venv\Scripts\activate.ps1 - python -c "import taichi" - pip install torch - pip install -r requirements_test.txt - ti diagnose - python tests/run_tests.py -vr2 -t2 + .\.github\workflows\scripts\win_test.ps1 -libsDir "$env:LocalAppData/buildbot" env: + PY: ${{ matrix.python }} + TI_WANTED_ARCHS: "cpu,cuda,opengl,vulkan" TI_SKIP_VERSION_CHECK: ON upload_to_pypi: @@ -418,7 +367,7 @@ jobs: needs: [build_and_test_linux, build_and_test_manylinux2014, build_and_test_mac, build_and_test_m1, build_and_test_macos_1014, build_and_test_windows] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: actions/setup-python@v2 with: python-version: 3.9 @@ -446,7 +395,7 @@ jobs: runs-on: ubuntu-latest if: ${{ github.event.inputs.version }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: actions/setup-python@v2 with: python-version: 3.9 diff --git a/.github/workflows/scripts/android-build.sh b/.github/workflows/scripts/android-build.sh index 2f9df6681601e..339d01e2311cb 100755 --- a/.github/workflows/scripts/android-build.sh +++ b/.github/workflows/scripts/android-build.sh @@ -14,6 +14,7 @@ setup-sccache-local setup_python setup-android-ndk-env +python -m pip install -r requirements_dev.txt python setup.py clean python setup.py build_ext cd build diff --git a/.github/workflows/scripts/common-utils.ps1 b/.github/workflows/scripts/common-utils.ps1 new file mode 100644 index 0000000000000..e8e5dd9555628 --- /dev/null +++ b/.github/workflows/scripts/common-utils.ps1 @@ -0,0 +1,279 @@ +function Info($text, $prefix="BUILD") { + Write-Host -ForegroundColor Green "[$prefix] $text" +} + +function ProbeVirtualMemory { + $MethodDefinition = @" +using System; +using System.Runtime.InteropServices; + +public class VirtualProbe { + [DllImport("kernel32.dll", SetLastError = true)] + public static extern IntPtr GetCurrentProcess(); + [DllImport("kernel32.dll", SetLastError=true, ExactSpelling=true)] + public static extern IntPtr VirtualAllocEx(IntPtr hProcess, IntPtr lpAddress, IntPtr dwSize, uint flAllocationType, uint flProtect); + [DllImport("kernel32.dll", SetLastError=true, ExactSpelling=true)] + public static extern bool VirtualFreeEx(IntPtr hProcess, IntPtr lpAddress, IntPtr dwSize, uint dwFreeType); +} +"@ + if(-not ('VirtualProbe' -as [type])) { + Add-Type -TypeDefinition $MethodDefinition -Language CSharp + } + $totalSz = 0 + $attempt = [int64](512 * 1024 * 1024 * 1024) + $allocated = @() + $me = [VirtualProbe]::GetCurrentProcess() + while($attempt -gt 256 * 1024 * 1024) { + $ptr = [VirtualProbe]::VirtualAllocEx($me, 0, $attempt, 0x3000, 0x01) # MEM_COMMIT | MEM_RESERVE, PAGE_NOACCESS + if($ptr -eq 0) { + $attempt = [int64]($attempt / 2) + } else { + $allocated += $ptr + $totalSz += $attempt + } + } + + foreach($ptr in $allocated) { + $_ = [VirtualProbe]::VirtualFreeEx($me, $ptr, 0, 0x8000) # MEM_RELEASE + } + + return $totalSz +} + +function EstimateNumProcs($expectedPhysicalMemoryUsage = 512, 
$expectedVirtualMemoryUsage = 2048) {
+    # Things on Windows are complicated.
+    # Windows never overcommits, so we can't just use physical memory to estimate
+    # parallelism; the pagefile (akin to swap on Linux, but with subtle differences) must
+    # also be considered. What's more irritating is that the pagefile is not
+    # a fixed size: it can grow on demand, so Win32_OperatingSystem.FreeVirtualMemory
+    # is almost always smaller than what you can really use.
+    # Aaaand, there are containers, where you can't even get the disk size or
+    # pagefile status.
+    # So we just probe the available virtual memory size.
+    $os = Get-CimInstance -ClassName Win32_OperatingSystem
+    $phys = $os.TotalVisibleMemorySize / 1024
+    $maxVirt = [int]($(ProbeVirtualMemory) / 1024 / 1024)
+    $procsByVirtMem = [int]($maxVirt / $expectedVirtualMemoryUsage)
+    $procsByPhysMem = [int](($phys - 1024) / $expectedPhysicalMemoryUsage)
+    $procsByProcessors = [int]($env:NUMBER_OF_PROCESSORS)
+    Info "Physical memory: $phys MB" "ESTIMATE"
+    Info "Allocatable (virtual) memory: $maxVirt MB" "ESTIMATE"
+    Info "Processors: $procsByProcessors" "ESTIMATE"
+    Info "Parallelism by physical memory: $procsByPhysMem" "ESTIMATE"
+    Info "Parallelism by allocatable memory: $procsByVirtMem" "ESTIMATE"
+    $procs = [int]([Math]::Min($procsByPhysMem, [Math]::Min($procsByVirtMem, $procsByProcessors)))
+    Info "Parallelism: $procs" "ESTIMATE"
+    # TODO: GPU memory
+    return $procs
+}
+
+function Setup-VS {
+    Info "Setting up Visual Studio"
+    foreach($progRoot in $env:ProgramFiles, ${env:ProgramFiles(x86)}) {
+        $vsBase = Join-Path $progRoot 'Microsoft Visual Studio'
+        foreach($ver in '2022','2019') {
+            foreach($edition in 'Enterprise','Professional','Community','BuildTools') {
+                $vsPath = Join-Path $vsBase $ver $edition
+                $clangPath = Join-Path $vsPath "VC\Tools\Llvm\x64\bin\clang.exe"
+                if (Test-Path $clangPath) {
+                    $devShellModule = Get-Item $(Join-Path $vsPath "Common7\Tools\Microsoft.VisualStudio.DevShell.dll")
+                    Import-Module $devShellModule
+                    Enter-VsDevShell -VsInstallPath $vsPath -SkipAutomaticLocation -DevCmdArguments "-arch=x64"
+                    return
+                }
+            }
+        }
+    }
+
+    throw "Could not find Visual Studio with Clang"
+}
+
+function Setup-Python($libsDir, $version = "3.7") {
+    if ([string]::IsNullOrEmpty($version)) {
+        throw "Should specify Python version"
+    }
+
+    Info "Setting up Python environment $version"
+
+    function PipOps {
+        Invoke python -m pip install -U pip wheel
+        Invoke python -m pip uninstall taichi taichi-nightly -y
+        # These have to be re-installed to avoid strange certificate issue
+        # on CPU docker environment
+        Invoke python -m pip install --upgrade --force-reinstall numpy cmake wheel
+    }
+
+    if (Get-Command python -ErrorAction SilentlyContinue) {
+        $ver = & python --version
+        Info "Found $ver" "Python"
+        if ($ver.Startswith("Python ${version}.")) {
+            Info "Using $ver" "Python"
+            $venv = "$libsDir/taichi-venv-$version"
+            if(-not (Test-Path $venv)) {
+                Invoke python -m venv $venv
+            }
+            . "$libsDir/taichi-venv-$version/Scripts/activate.ps1"
+            PipOps
+            return
+        }
+    }
+
+    if (Get-Command conda -ErrorAction SilentlyContinue) {
+        Info "Using conda environment" "Python"
+        # Work around conda not being properly initialized in the container
+        Invoke conda shell.powershell hook | Out-String | Invoke-Expression
+        $condaEnv = "$libsDir/taichi-conda-$version"
+        if (-not (Test-Path $condaEnv)) {
+            conda create -y -q --prefix=$condaEnv python=$version
+        }
+        conda activate $condaEnv
+        PipOps
+        return
+    }
+
+    throw "Could not setup Python"
+}
+
+function Resolve-Path-String-Force {
+    <#
+    .SYNOPSIS
+        Calls Resolve-Path but works for files that don't exist.
+    .REMARKS
+        From http://devhawk.net/blog/2010/1/22/fixing-powershells-busted-resolve-path-cmdlet
+    #>
+    param (
+        [string] $FileName
+    )
+
+    $FileName = Resolve-Path $FileName -ErrorAction SilentlyContinue `
+        -ErrorVariable _frperror
+    if (-not($FileName)) {
+        $FileName = $_frperror[0].TargetObject
+    }
+
+    return $FileName
+}
+
+function Invoke() {
+    # https://stackoverflow.com/questions/47032005/why-does-a-powershell-script-not-end-when-there-is-a-non-zero-exit-code-using-th
+    # A handy way to run a command, and automatically throw an error if the
+    # exit code is non-zero.
+    if ($args.Count -eq 0) {
+        throw "Must supply some arguments."
+    }
+
+    Write-Host -ForegroundColor Blue "[CMD] $args"
+
+    $command = $args[0]
+    $commandArgs = @()
+    if ($args.Count -gt 1) {
+        $commandArgs = $args[1..($args.Count - 1)]
+    }
+
+    & $command $commandArgs
+    $ok = $?
+    $result = $LASTEXITCODE
+
+    if (-not $ok) {
+        throw "$command $commandArgs failed."
+    }
+
+    if ($result -ne 0) {
+        throw "$command $commandArgs exited with code $result."
+    }
+}
+
+
+# sccache does not work on Windows (reports UnknownFlag with -Xclang),
+# so we use ccache here instead.
+function SetupCCacheLocal($root) {
+    $root = Resolve-Path-String-Force $root
+
+    New-Item -ItemType Directory -Path "$root\cache" -ErrorAction SilentlyContinue
+
+    $env:CCACHE_DIR = "$root/cache"
+    $env:CCACHE_TEMPDIR = "$env:TEMP/ccache-temp"
+    $env:CCACHE_MAXSIZE = "10G"
+    $env:CCACHE_LOG_FILE = "$root/ccache_error.log"
+
+    if (-not (Test-Path -Path "$root/bin/ccache.exe")) {
+        Info "Downloading ccache"
+        Push-Location "$root"
+
+        Invoke-WebRequest `
+            -Uri "https://github.com/ccache/ccache/releases/download/v4.5.1/ccache-4.5.1-windows-64.zip" `
+            -MaximumRetryCount 10 -RetryIntervalSec 300 `
+            -OutFile ccache.zip
+        Expand-Archive -Force ccache.zip .
+        Rename-Item -Force -Path "ccache-4.5.1-windows-64" -NewName "bin"
+        Pop-Location
+    }
+
+    $env:PATH += ";$root/bin"
+    $env:TAICHI_CMAKE_ARGS += " -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
+
+    ccache -s -v
+}
+
+function ClearTaichiOfflineCache {
+    # Remove-Item -Force -Path "$env:LocalAppData/build-cache/dot-cache/" -Recurse -ErrorAction SilentlyContinue
+}
+
+function PrepareBuildCache {
+    "git-cache","pip-cache" | % {
+        New-Item -ItemType Directory -Path "$env:LocalAppData/build-cache/$_" -ErrorAction SilentlyContinue
+    }
+    Push-Location $env:LocalAppData/build-cache/git-cache
+    if (-not (Test-Path -Path objects)) {
+        Invoke git init --bare
+    }
+    Pop-Location
+}
+
+function CIDockerRun {
+    $containerName = $null
+    $shouldRm = $true
+
+    for($i = 0; $i -lt $args.Count; $i++) {
+        $v = $args[$i]
+        if($v -eq "-n" -or $v -eq "--name") {
+            $containerName = $args[$i+1]
+            $shouldRm = $false
+            $i++
+        }
+    }
+
+    if($containerName) {
+        Invoke docker rm -f $containerName
+    }
+
+    if($shouldRm -and -not $args.Contains("--rm")) {
+        $args = ,("--rm") + $args
+    }
+
+    $TiEnvs = @()
+    Get-ChildItem "env:*" | % {
+        if($_.Key.Startswith("TI_")) {
+            $TiEnvs += "-e", $_.Key
+        }
+    }
+
+    $extraArgs = ($env:CI_DOCKER_RUN_EXTRA_ARGS ?? "").Trim().Split()
+
+    Invoke docker run `
+        -i `
+        -e PY `
+        -e PROJECT_NAME `
+        -e TAICHI_CMAKE_ARGS `
+        -e IN_DOCKER=true `
+        -e PIP_CACHE_DIR=X:/pip-cache `
+        -e GIT_ALTERNATE_OBJECT_DIRECTORIES=X:/git-cache/objects `
+        -e TI_CI=1 `
+        @TiEnvs `
+        -v (($env:LocalAppData -replace "\\", "/") + "/build-cache:X:") `
+        @extraArgs `
+        @args
+}
diff --git a/.github/workflows/scripts/common-utils.sh b/.github/workflows/scripts/common-utils.sh
index 39f59d2f0a933..1b4e5a27b36b3 100644
--- a/.github/workflows/scripts/common-utils.sh
+++ b/.github/workflows/scripts/common-utils.sh
@@ -1,48 +1,17 @@
 #!/bin/bash
 
-check_in_docker() {
-    # This is a temporary solution to detect in a docker, but it should work
-    if [[ $(whoami) == "dev" ]]; then
-        echo "true"
-    else
-        echo "false"
-    fi
-}
-
-setup_sccache() {
-    export SCCACHE_DIR=$(pwd)/sccache_cache
-    export SCCACHE_CACHE_SIZE="128M"
-    export SCCACHE_LOG=error
-    export SCCACHE_ERROR_LOG=$(pwd)/sccache_error.log
-    mkdir -p "$SCCACHE_DIR"
-    echo "sccache dir: $SCCACHE_DIR"
-    ls -la "$SCCACHE_DIR"
-
-    if [[ $OSTYPE == "linux-"* ]]; then
-        wget https://github.com/mozilla/sccache/releases/download/v0.2.15/sccache-v0.2.15-x86_64-unknown-linux-musl.tar.gz
-        tar -xzf sccache-v0.2.15-x86_64-unknown-linux-musl.tar.gz
-        chmod +x sccache-v0.2.15-x86_64-unknown-linux-musl/sccache
-        export PATH=$(pwd)/sccache-v0.2.15-x86_64-unknown-linux-musl:$PATH
-    elif [[ $(uname -m) == "arm64" ]]; then
-        wget https://github.com/mozilla/sccache/releases/download/v0.2.15/sccache-v0.2.15-aarch64-apple-darwin.tar.gz
-        tar -xzf sccache-v0.2.15-aarch64-apple-darwin.tar.gz
-        chmod +x sccache-v0.2.15-aarch64-apple-darwin/sccache
-        export PATH=$(pwd)/sccache-v0.2.15-aarch64-apple-darwin:$PATH
-    else
-        wget https://github.com/mozilla/sccache/releases/download/v0.2.15/sccache-v0.2.15-x86_64-apple-darwin.tar.gz
-        tar -xzf sccache-v0.2.15-x86_64-apple-darwin.tar.gz
-        chmod +x sccache-v0.2.15-x86_64-apple-darwin/sccache
-        export PATH=$(pwd)/sccache-v0.2.15-x86_64-apple-darwin:$PATH
-    fi
-}
+set -x
 
 setup_python() {
-    if [[ "$(check_in_docker)" == "true" ]]; then
-        source $HOME/miniconda/etc/profile.d/conda.sh
-        conda activate "$PY"
-    fi
+    for conda in miniconda miniconda3 miniforge3; do
+        if [[ -d $HOME/$conda ]]; then
+            source
$HOME/$conda/bin/activate + conda activate "$PY" + break + fi + done + python3 -m pip install -U pip python3 -m pip uninstall taichi taichi-nightly -y - python3 -m pip install -r requirements_dev.txt } function setup-sccache-local { @@ -81,6 +50,110 @@ function setup-sccache-local { export TAICHI_CMAKE_ARGS="$TAICHI_CMAKE_ARGS -DCMAKE_C_COMPILER_LAUNCHER=sccache -DCMAKE_CXX_COMPILER_LAUNCHER=sccache" } +function clear-taichi-offline-cache { + rm -rf ~/build-cache/dot-cache/taichi # Clear taichi offline cache + rm -rf ~/.cache/taichi # Clear taichi offline cache +} + +function prepare-build-cache { + export CACHE_HOME=${1:-$HOME/build-cache} + mkdir -p $CACHE_HOME/{dot-cache/pip,dot-gradle,git-cache,sccache/{bin,cache}} + chmod 0777 $CACHE_HOME/ $CACHE_HOME/* || true + pushd $CACHE_HOME/git-cache + if [ ! -d objects ]; then git init --bare; fi + popd + + clear-taichi-offline-cache + + if [ ! -z $GITHUB_ENV ]; then + # for bare metal run + export SCCACHE_ROOT=$CACHE_HOME/sccache + export GIT_ALTERNATE_OBJECT_DIRECTORIES=$CACHE_HOME/git-cache/objects + echo SCCACHE_ROOT=$SCCACHE_ROOT >> $GITHUB_ENV + echo GIT_ALTERNATE_OBJECT_DIRECTORIES=$GIT_ALTERNATE_OBJECT_DIRECTORIES >> $GITHUB_ENV + echo CACHE_HOME=$CACHE_HOME >> $GITHUB_ENV + else + # container run + true + fi +} + +function fix-build-cache-permission { + if [ "$(uname -s)" = "Linux" ]; then + sudo -n chown -R $(id -u):$(id -g) $CACHE_HOME || true + fi +} + +function ci-docker-run { + ARGS="$@" + SHOULD_RM="--rm" + while [[ $# > 0 ]]; do + case $1 in + -n | --name) + shift + CONTAINER_NAME="$1" + SHOULD_RM="" + break + ;; + esac + shift + done + + if [ ! -z $CONTAINER_NAME ]; then + docker rm -f $CONTAINER_NAME + fi + + TI_ENVS="" + for i in $(env | grep ^TI_); do + TI_ENVS="$TI_ENVS -e $i" + done + + CACHE_HOME=${CACHE_HOME:-$HOME/build-cache} + + docker run \ + -i \ + $SHOULD_RM \ + --user dev \ + -e PY \ + -e PROJECT_NAME \ + -e TAICHI_CMAKE_ARGS \ + -e IN_DOCKER=true \ + -e TI_CI=1 \ + $TI_ENVS \ + -e SCCACHE_ROOT=/var/lib/sccache \ + -e CACHE_HOME=/var/lib/cache-home \ + -e GIT_ALTERNATE_OBJECT_DIRECTORIES=/var/lib/git-cache/objects \ + -v $(readlink -f $CACHE_HOME):/var/lib/cache-home \ + -v $(readlink -f $CACHE_HOME/sccache):/var/lib/sccache \ + -v $(readlink -f $CACHE_HOME/git-cache):/var/lib/git-cache \ + -v $(readlink -f $CACHE_HOME/dot-cache):/home/dev/.cache \ + -v $(readlink -f $CACHE_HOME/dot-gradle):/home/dev/.gradle \ + $CI_DOCKER_RUN_EXTRA_ARGS \ + $ARGS +} + +function ci-docker-run-gpu { + for i in {0..9}; do + if xset -display ":$i" -q >/dev/null 2>&1; then + break + fi + done + + if [ $? -ne 0 ]; then + echo "No display!" 
+ exit 1 + fi + + ci-docker-run \ + --runtime=nvidia \ + --gpus 'all,"capabilities=graphics,utility,display,video,compute"' \ + -e DISPLAY=:$i \ + -e GPU_BUILD=ON \ + -e GPU_TEST=ON \ + -v /tmp/.X11-unix:/tmp/.X11-unix \ + $@ +} + function setup-android-ndk-env { export ANDROID_NDK_ROOT=/android-sdk/ndk-bundle export ANDROID_CMAKE_ARGS="-DCMAKE_TOOLCHAIN_FILE=${ANDROID_NDK_ROOT}/build/cmake/android.toolchain.cmake -DANDROID_NATIVE_API_LEVEL=29 -DANDROID_ABI=arm64-v8a" diff --git a/.github/workflows/scripts/unix-build-v2.sh b/.github/workflows/scripts/unix-build.sh similarity index 84% rename from .github/workflows/scripts/unix-build-v2.sh rename to .github/workflows/scripts/unix-build.sh index 1c64976c18d12..32c8322892807 100755 --- a/.github/workflows/scripts/unix-build-v2.sh +++ b/.github/workflows/scripts/unix-build.sh @@ -1,15 +1,15 @@ #!/bin/bash -# TODO: replace unix_build.sh -# currently only used in android build job set -ex . $(dirname $0)/common-utils.sh -IN_DOCKER=$(check_in_docker) [[ "$IN_DOCKER" == "true" ]] && cd taichi +# TODO: Move llvm installation from container image to here + build_taichi_wheel() { + python3 -m pip install -r requirements_dev.txt git fetch origin master --tags PROJECT_TAGS="" EXTRA_ARGS="" @@ -27,9 +27,11 @@ build_taichi_wheel() { python3 misc/make_changelog.py --ver origin/master --repo_dir ./ --save python3 setup.py $PROJECT_TAGS bdist_wheel $EXTRA_ARGS - sccache -s + sccache -s || true } +fix-build-cache-permission + setup-sccache-local setup_python diff --git a/.github/workflows/scripts/unix_build.sh b/.github/workflows/scripts/unix_build.sh deleted file mode 100755 index d5565de735b15..0000000000000 --- a/.github/workflows/scripts/unix_build.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash -set -ex - -. $(dirname $0)/common-utils.sh - -IN_DOCKER=$(check_in_docker) -[[ "$IN_DOCKER" == "true" ]] && cd taichi - -build_taichi_wheel() { - git fetch origin master --tags - PROJECT_TAGS="" - EXTRA_ARGS="" - if [ "$PROJECT_NAME" = "taichi-nightly" ]; then - PROJECT_TAGS="egg_info --tag-date" - fi - - if [[ $OSTYPE == "linux-"* ]]; then - if [ -f /etc/centos-release ] ; then - EXTRA_ARGS="-p manylinux2014_x86_64" - else - EXTRA_ARGS="-p manylinux_2_27_x86_64" - fi - fi - python3 misc/make_changelog.py --ver origin/master --repo_dir ./ --save - - exec env TAICHI_CMAKE_ARGS="${TAICHI_CMAKE_ARGS}" python3 setup.py $PROJECT_TAGS bdist_wheel $EXTRA_ARGS - sccache -s -} - -setup_sccache -setup_python - -build_taichi_wheel -NUM_WHL=$(ls dist/*.whl | wc -l) -if [ $NUM_WHL -ne 1 ]; then echo "ERROR: created more than 1 whl." 
&& exit 1; fi - -chmod -R 777 "$SCCACHE_DIR" -rm -f python/CHANGELOG.md diff --git a/.github/workflows/scripts/unix_test.sh b/.github/workflows/scripts/unix_test.sh index 0dc325118df66..ea8a0ae4e7a6d 100755 --- a/.github/workflows/scripts/unix_test.sh +++ b/.github/workflows/scripts/unix_test.sh @@ -7,15 +7,13 @@ export PYTHONUNBUFFERED=1 export TI_SKIP_VERSION_CHECK=ON export TI_CI=1 -export TI_IN_DOCKER=$(check_in_docker) export LD_LIBRARY_PATH=$PWD/build/:$LD_LIBRARY_PATH export TI_OFFLINE_CACHE_FILE_PATH=$PWD/.cache/taichi +setup_python + +[[ "$IN_DOCKER" == "true" ]] && cd taichi -if [[ "$TI_IN_DOCKER" == "true" ]]; then - source $HOME/miniconda/etc/profile.d/conda.sh - conda activate "$PY" -fi python3 -m pip install dist/*.whl if [ -z "$GPU_TEST" ]; then python3 -m pip install -r requirements_test.txt diff --git a/.github/workflows/scripts/win_build.ps1 b/.github/workflows/scripts/win_build.ps1 index dba6d15e0cc9c..13da3ce821219 100644 --- a/.github/workflows/scripts/win_build.ps1 +++ b/.github/workflows/scripts/win_build.ps1 @@ -11,126 +11,105 @@ param ( $ErrorActionPreference = "Stop" -$RepoURL = 'https://github.com/taichi-dev/taichi' +. $PSScriptRoot\common-utils.ps1 -function WriteInfo($text) { - Write-Host -ForegroundColor Green "[BUILD] $text" +$libsDir = Resolve-Path-String-Force $libsDir +if (-not (Test-Path $libsDir)) { + New-Item -ItemType Directory -Path $libsDir } -# Get sccache -$env:CCACHE_DIR="${pwd}/ccache_cache" -$env:CCACHE_MAXSIZE="128M" -$env:CCACHE_LOGFILE="${pwd}/ccache_error.log" -WriteInfo("ccache dir: $Env:CCACHE_DIR") -md "$Env:CCACHE_DIR" -ea 0 -if (-not (Test-Path "ccache-4.5.1-windows-64")) { - curl.exe --retry 10 --retry-delay 5 https://github.com/ccache/ccache/releases/download/v4.5.1/ccache-4.5.1-windows-64.zip -LO - 7z x ccache-4.5.1-windows-64.zip - $env:PATH += ";${pwd}/ccache-4.5.1-windows-64" -} -ccache -v -s +$RepoURL = 'https://github.com/taichi-dev/taichi' -# WriteInfo("Install 7Zip") -# Install-Module 7Zip4PowerShell -Force -Verbose -Scope CurrentUser +SetupCCacheLocal "$libsDir/ccache" if ($clone) { - WriteInfo("Clone the repository") - git clone --recurse-submodules $RepoURL + Info("Clone the repository") + Invoke git clone --recurse-submodules $RepoURL Set-Location .\taichi } -$libsDir = (Resolve-Path $libsDir).Path +Setup-VS -if (-not (Test-Path $libsDir)) { - New-Item -ItemType Directory -Path $libsDir -} Push-Location $libsDir -if (!$llvmVer.CompareTo("10")) { - if (-not (Test-Path "taichi_llvm")) { - WriteInfo("Download and extract LLVM") - curl.exe --retry 10 --retry-delay 5 https://github.com/taichi-dev/taichi_assets/releases/download/llvm10/taichi-llvm-10.0.0-msvc2019.zip -LO - if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE; } - 7z x taichi-llvm-10.0.0-msvc2019.zip -otaichi_llvm - } - if (-not (Test-Path "taichi_clang")) { - WriteInfo("Download and extract Clang") - curl.exe --retry 10 --retry-delay 5 https://github.com/taichi-dev/taichi_assets/releases/download/llvm10/clang-10.0.0-win.zip -LO - if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE; } - 7z x clang-10.0.0-win.zip -otaichi_clang - } -} else { - if (-not (Test-Path "taichi_llvm_15")) { - WriteInfo("Download and extract LLVM") - curl.exe --retry 10 --retry-delay 5 https://github.com/python3kgae/taichi_assets/releases/download/llvm15_vs2019_clang/taichi-llvm-15.0.0-msvc2019.zip -LO - if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE; } - 7z x taichi-llvm-15.0.0-msvc2019.zip -otaichi_llvm_15 +function DownloadDep { + param ( + 
[string]$name, + [string]$outfile, + [string]$dir, + [string]$url + ) + if (-not (Test-Path $dir)) { + Info("Download and extract $name") + Invoke-WebRequest ` + -Uri $url ` + -MaximumRetryCount 10 -RetryIntervalSec 5 ` + -OutFile $outfile + Expand-Archive -Force $outfile $dir } - if (-not (Test-Path "taichi_clang_15")) { - WriteInfo("Download and extract Clang") - curl.exe --retry 10 --retry-delay 5 https://github.com/python3kgae/taichi_assets/releases/download/llvm15_vs2022_clang/clang-15.0.0-win.zip -LO - if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE; } - 7z x clang-15.0.0-win.zip -otaichi_clang_15 - } } - -if (!$llvmVer.CompareTo("10")) { +if ($llvmVer -eq "10") { + DownloadDep LLVM llvm.zip taichi_llvm ` + https://github.com/taichi-dev/taichi_assets/releases/download/llvm10/taichi-llvm-10.0.0-msvc2019.zip + DownloadDep Clang clang.zip taichi_clang ` + https://github.com/taichi-dev/taichi_assets/releases/download/llvm10/clang-10.0.0-win.zip $env:LLVM_DIR = "$libsDir\taichi_llvm" -} else { + $env:TAICHI_CMAKE_ARGS += " -DCLANG_EXECUTABLE=$($libsDir -replace "\\", "\\")\\taichi_clang\\bin\\clang++.exe" + $env:TAICHI_CMAKE_ARGS += " -DLLVM_AS_EXECUTABLE=$($libsDir -replace "\\", "\\")\\taichi_llvm\\bin\\llvm-as.exe" +} elseif ($llvmVer -eq "15") { + DownloadDep LLVM-15 llvm-15.zip taichi_llvm_15 ` + https://github.com/python3kgae/taichi_assets/releases/download/llvm15_vs2019_clang/taichi-llvm-15.0.0-msvc2019.zip + DownloadDep Clang-15 clang-15.zip taichi_clang_15 ` + https://github.com/python3kgae/taichi_assets/releases/download/llvm15_vs2022_clang/clang-15.0.0-win.zip $env:LLVM_DIR = "$libsDir\taichi_llvm_15" + $env:TAICHI_CMAKE_ARGS += " -DCLANG_EXECUTABLE=$($libsDir -replace "\\", "\\")\\taichi_clang_15\\bin\\clang++.exe" + $env:TAICHI_CMAKE_ARGS += " -DLLVM_AS_EXECUTABLE=$($libsDir -replace "\\", "\\")\\taichi_llvm_15\\bin\\llvm-as.exe" + $env:TAICHI_CMAKE_ARGS += " -DTI_LLVM_15:BOOL=ON" +} else { + throw "Unsupported LLVM version" } $env:TAICHI_CMAKE_ARGS += " -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_C_COMPILER=clang" + if ($installVulkan) { - WriteInfo("Download and install Vulkan") - if (-not (Test-Path "VulkanSDK")) { - curl.exe --retry 10 --retry-delay 5 https://sdk.lunarg.com/sdk/download/1.2.189.0/windows/VulkanSDK-1.2.189.0-Installer.exe -Lo VulkanSDK.exe - if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE; } - $installer = Start-Process -FilePath VulkanSDK.exe -Wait -PassThru -ArgumentList @("/S"); + if (-not (Test-Path $env:VULKAN_SDK)) { + Info("Download and install Vulkan") + Invoke-WebRequest ` + -Uri 'https://sdk.lunarg.com/sdk/download/1.2.189.0/windows/VulkanSDK-1.2.189.0-Installer.exe' ` + -MaximumRetryCount 10 -RetryIntervalSec 5 ` + -OutFile VulkanSDK.exe + $installer = Start-Process -FilePath VulkanSDK.exe -Wait -PassThru -ArgumentList @("/S") $installer.WaitForExit(); } - $env:VULKAN_SDK = "$libsDir\VulkanSDK\1.2.189.0" + $env:VULKAN_SDK = "C:\VulkanSDK\1.2.189.0" $env:PATH += ";$env:VULKAN_SDK\Bin" $env:TAICHI_CMAKE_ARGS += " -DTI_WITH_VULKAN:BOOL=ON" } -$env:TAICHI_CMAKE_ARGS += " -DTI_WITH_C_API:BOOL=ON" -$env:TAICHI_CMAKE_ARGS += " -DTI_WITH_DX11:BOOL=ON" - Pop-Location -clang --version - -WriteInfo("Setting up Python environment") -python -m venv venv -. venv\Scripts\activate.ps1 -python -m pip install wheel -python -m pip install -r requirements_dev.txt -if (-not $?) 
{ exit 1 } -WriteInfo("Building Taichi") -if (!$llvmVer.CompareTo("10")) { - $env:TAICHI_CMAKE_ARGS += " -DCLANG_EXECUTABLE=$libsDir\\taichi_clang\\bin\\clang++.exe" - $env:TAICHI_CMAKE_ARGS += " -DLLVM_AS_EXECUTABLE=$libsDir\\taichi_llvm\\bin\\llvm-as.exe" -} else { - $env:TAICHI_CMAKE_ARGS += " -DCLANG_EXECUTABLE=$libsDir\\taichi_clang_15\\bin\\clang++.exe" - $env:TAICHI_CMAKE_ARGS += " -DLLVM_AS_EXECUTABLE=$libsDir\\taichi_llvm_15\\bin\\llvm-as.exe" - $env:TAICHI_CMAKE_ARGS += " -DTI_LLVM_15:BOOL=ON" -} +Invoke clang --version + +Setup-Python $libsDir $env:PY + +Invoke python -m pip install -r requirements_dev.txt + +Info("Building Taichi") + if ($install) { if ($develop) { - python setup.py develop + Invoke python setup.py develop } else { - python setup.py install + Invoke python setup.py install } - if (-not $?) { exit 1 } - WriteInfo("Build and install finished") + Info("Build and install finished") } else { if ($env:PROJECT_NAME -eq "taichi-nightly") { - python setup.py egg_info --tag-date bdist_wheel + Invoke python setup.py egg_info --tag-date bdist_wheel } else { - python setup.py bdist_wheel + Invoke python setup.py bdist_wheel } - if (-not $?) { exit 1 } - WriteInfo("Build finished") + Info("Build finished") } + ccache -s -v diff --git a/.github/workflows/scripts/win_build_test_cpu.ps1 b/.github/workflows/scripts/win_build_test_cpu.ps1 deleted file mode 100644 index 4aa90582517ed..0000000000000 --- a/.github/workflows/scripts/win_build_test_cpu.ps1 +++ /dev/null @@ -1,116 +0,0 @@ -# Build script for windows CPU -# TODO unify this with the other Win scripts - -param ( - [switch]$clone = $false, - [switch]$install = $false, - [string]$libsDir = "C:\", - [string]$llvmVer = "10" -) - -$ErrorActionPreference = "Stop" - -$RepoURL = 'https://github.com/taichi-dev/taichi' - -function WriteInfo($text) { - Write-Host -ForegroundColor Green "[BUILD] $text" -} - -$libsDir = (Resolve-Path $libsDir).Path -if (-not (Test-Path $libsDir)) { - New-Item -ItemType Directory -Path $libsDir -} -Set-Location $libsDir - -if (!$llvmVer.CompareTo("10")) { - if (-not (Test-Path "taichi_llvm")) { - WriteInfo("Download and extract LLVM") - - - curl.exe --retry 10 --retry-delay 5 https://github.com/taichi-dev/taichi_assets/releases/download/llvm10/taichi-llvm-10.0.0-msvc2019.zip -LO - if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE; } - 7z x taichi-llvm-10.0.0-msvc2019.zip -otaichi_llvm - } -} else { - if (-not (Test-Path "taichi_llvm_15")) { - WriteInfo("Download and extract LLVM") - curl.exe --retry 10 --retry-delay 5 https://github.com/python3kgae/taichi_assets/releases/download/llvm15_vs2019_clang_220731/taichi-llvm-15.0.0-msvc2019.zip -LO - if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE; } - 7z x taichi-llvm-15.0.0-msvc2019.zip -otaichi_llvm_15 - } -} - -if (-not (Test-Path "taichi_clang")) { - WriteInfo("Download and extract Clang") - curl.exe --retry 10 --retry-delay 5 https://github.com/taichi-dev/taichi_assets/releases/download/llvm10/clang-10.0.0-win.zip -LO - if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE; } - 7z x clang-10.0.0-win.zip -otaichi_clang -} - -WriteInfo("Setting the env vars") -if (!$llvmVer.CompareTo("10")) { - $env:LLVM_DIR = "C://taichi_llvm" -} else { - $env:LLVM_DIR = "C:\\taichi_llvm_15" -} - -#TODO enable build test -$env:TAICHI_CMAKE_ARGS = "-DTI_WITH_OPENGL:BOOL=OFF -DTI_WITH_CC:BOOL=OFF -DTI_WITH_VULKAN:BOOL=OFF -DTI_WITH_CUDA:BOOL=OFF -DTI_BUILD_TESTS:BOOL=OFF" - -#TODO: For now we need to hard code the compiler path from build tools 2019 
-$env:TAICHI_CMAKE_ARGS +=' -DCMAKE_CXX_COMPILER=C:/Program\ Files\ (x86)/Microsoft\ Visual\ Studio/2019/BuildTools/vc/Tools/Llvm/x64/bin/clang++.exe -DCMAKE_C_COMPILER=C:/Program\ Files\ (x86)/Microsoft\ Visual\ Studio/2019/BuildTools/vc/Tools/Llvm/x64/bin/clang.exe' -$env:TAICHI_CMAKE_ARGS += " -DCLANG_EXECUTABLE=C:\\taichi_clang\\bin\\clang++.exe" - -if (!$llvmVer.CompareTo("10")) { - $env:TAICHI_CMAKE_ARGS += " -DLLVM_AS_EXECUTABLE=C:\\taichi_llvm\\bin\\llvm-as.exe -DTI_WITH_VULKAN:BOOL=OFF" -} else { - $env:TAICHI_CMAKE_ARGS += " -DLLVM_AS_EXECUTABLE=C:\\taichi_llvm_15\\bin\\llvm-as.exe -DTI_WITH_VULKAN:BOOL=OFF" - $env:TAICHI_CMAKE_ARGS += " -DTI_LLVM_15:BOOL=ON" - $env:TAICHI_CMAKE_ARGS += " -DTI_WITH_DX12:BOOL=ON" -} - - -WriteInfo("Checking clang compiler") -clang --version - -WriteInfo("Enter the repository") -Set-Location .\taichi - -# Get sccache -WriteInfo("Downloading sccache") -$env:CCACHE_DIR="${pwd}/ccache_cache" -$env:CCACHE_MAXSIZE="128M" -$env:CCACHE_LOGFILE="${pwd}/ccache_error.log" -WriteInfo("ccache dir: $Env:CCACHE_DIR") -md "$Env:CCACHE_DIR" -ea 0 -if (-not (Test-Path "ccache-4.5.1-windows-64")) { - curl.exe --retry 10 --retry-delay 5 https://github.com/ccache/ccache/releases/download/v4.5.1/ccache-4.5.1-windows-64.zip -LO - if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE; } - 7z x ccache-4.5.1-windows-64.zip - $env:PATH += ";${pwd}/ccache-4.5.1-windows-64" -} -ccache -v -s - -WriteInfo("Setting up Python environment") -conda activate py37 -python -m pip install -r requirements_dev.txt -python -m pip install -r requirements_test.txt - -# These have to be re-installed to avoid strange certificate issue -# on CPU docker environment -python -m pip install --upgrade --force-reinstall numpy -python -m pip install --upgrade --force-reinstall cmake -python -m pip install --upgrade --force-reinstall wheel -if (-not $?) { exit 1 } - -WriteInfo("Building Taichi") -python setup.py install -if (-not $?) { exit 1 } -WriteInfo("Build finished") -ccache -s -v - -# We skip the test for the moment due to the long job execution time. -#$env:TI_ENABLE_PADDLE = "0" -#WriteInfo("Testing Taichi") -#python tests/run_tests.py -vr2 -t4 -k "not torch and not paddle" -a cpu -#WriteInfo("Test finished") diff --git a/.github/workflows/scripts/win_test.ps1 b/.github/workflows/scripts/win_test.ps1 index 371ebed695e51..36659f62187e8 100644 --- a/.github/workflows/scripts/win_test.ps1 +++ b/.github/workflows/scripts/win_test.ps1 @@ -1,59 +1,73 @@ +param ( + [string]$libsDir = "." +) + $ErrorActionPreference = "Stop" +. $PSScriptRoot\common-utils.ps1 + +Setup-VS + $env:PYTHONUNBUFFERED = 1 $env:TI_CI = 1 $env:TI_OFFLINE_CACHE_FILE_PATH = Join-Path -Path $pwd -ChildPath ".cache\taichi" -. 
venv\Scripts\activate.ps1
-python -c "import taichi"
-ti diagnose
-ti changelog
-echo wanted arch: $env:TI_WANTED_ARCHS
-pip install -r requirements_test.txt
-# TODO relax this when torch supports 3.10
-if ("$env:TI_WANTED_ARCHS".Contains("cuda")) {
-    pip install "torch==1.10.1+cu113; python_version < '3.10'" -f https://download.pytorch.org/whl/cu113/torch_stable.html
-} else {
-    pip install "torch; python_version < '3.10'"
-    pip install "paddlepaddle==2.3.0; python_version < '3.10'"
-}
+Setup-Python $libsDir $env:PY
+$os = Get-CimInstance -Class Win32_OperatingSystem
+Info "Total system memory: $($os.TotalVisibleMemorySize / 1024 / 1024) GB"

-if ("$env:TI_RUN_RELEASE_TESTS" -eq "1" -and -not "$env:TI_LITE_TEST") {
-    echo "Running release tests"
-    # release tests
-    pip install PyYAML
-    git clone https://github.com/taichi-dev/taichi-release-tests
-    mkdir -p repos/taichi/python/taichi
-    $EXAMPLES = & python -c 'import taichi.examples as e; print(e.__path__._path[0])' | Select-Object -Last 1
-    New-Item -Target $EXAMPLES -Path repos/taichi/python/taichi/examples -ItemType Junction
-    New-Item -Target taichi-release-tests/truths -Path truths -ItemType Junction
-    python taichi-release-tests/run.py --log=DEBUG --runners 1 taichi-release-tests/timelines
-    if (-not $?) { exit 1 }
-}
+$whl = & Get-ChildItem -Filter '*.whl' -Path dist | Select-Object -First 1
+echo $whl
+Invoke python -m pip install $whl.FullName
+Invoke python -c "import taichi"
+Invoke ti diagnose
+# Invoke ti changelog
+echo wanted arch: $env:TI_WANTED_ARCHS
+Invoke pip install -r requirements_test.txt
+Invoke pip install "paddlepaddle==2.3.0; python_version < '3.10'"

 # Run C++ tests
-python tests/run_tests.py --cpp
-if (-not $?) { exit 1 }
+Invoke python tests/run_tests.py --cpp

 # Fail fast, give priority to the error-prone tests
-python tests/run_tests.py -vr2 -t1 -k "paddle" -a cpu
-if (-not $?) { exit 1 }
+Invoke python tests/run_tests.py -vr2 -t1 -k "paddle" -a cpu

 # Disable paddle for the remaining test
 $env:TI_ENABLE_PADDLE = "0"

-if ("$env:TI_WANTED_ARCHS".Contains("cuda")) {
-    python tests/run_tests.py -vr2 -t4 -k "not torch and not paddle" -a cuda
-    if (-not $?) { exit 1 }
-}
 if ("$env:TI_WANTED_ARCHS".Contains("cpu")) {
-    python tests/run_tests.py -vr2 -t6 -k "not torch and not paddle" -a cpu
-    if (-not $?) { exit 1 }
+    # NOTE: Always test CPU with the non-CUDA version of PyTorch,
+    # since the CUDA version of PyTorch loads a lot of CUDA libraries,
+    # which inflates the required committed memory usage (not physical memory, but still relevant)
+    # to 5GiB per test process (compared to 1.4GiB for the non-CUDA version).
+    # This greatly improves test parallelism.
+    # This is a non-issue on Linux, since Linux overcommits.
+    # TODO relax this when torch supports 3.10
+    Invoke pip install "torch==1.12.1; python_version < '3.10'"
+    Invoke python tests/run_tests.py -vr2 "-t$(EstimateNumProcs)" -k "not torch and not paddle" -a cpu
+}
+if ("$env:TI_WANTED_ARCHS".Contains("cuda")) {
+    # TODO relax this when torch supports 3.10
+    Invoke pip install "torch==1.10.1+cu113; python_version < '3.10'" -f https://download.pytorch.org/whl/cu113/torch_stable.html
+    Invoke python tests/run_tests.py -vr2 -t4 -k "not torch and not paddle" -a cuda
 }
 if ("$env:TI_WANTED_ARCHS".Contains("opengl")) {
-    python tests/run_tests.py -vr2 -t4 -k "not torch and not paddle" -a opengl
-    if (-not $?)
{ exit 1 } + Invoke python tests/run_tests.py -vr2 -t4 -k "not torch and not paddle" -a opengl +} +if ("$env:TI_WANTED_ARCHS".Contains("vulkan")) { + Invoke python tests/run_tests.py -vr2 -t4 -k "not torch and not paddle" -a vulkan +} +Invoke python tests/run_tests.py -vr2 -t1 -k "torch" -a "$env:TI_WANTED_ARCHS" + +if ("$env:TI_RUN_RELEASE_TESTS" -eq "1" -and -not "$env:TI_LITE_TEST") { + echo "Running release tests" + # release tests + Invoke pip install PyYAML + Invoke git clone https://github.com/taichi-dev/taichi-release-tests + mkdir -p repos/taichi/python/taichi + $EXAMPLES = & python -c 'import taichi.examples as e; print(e.__path__._path[0])' | Select-Object -Last 1 + New-Item -Target $EXAMPLES -Path repos/taichi/python/taichi/examples -ItemType Junction + New-Item -Target taichi-release-tests/truths -Path truths -ItemType Junction + Invoke python taichi-release-tests/run.py --log=DEBUG --runners 1 taichi-release-tests/timelines } -python tests/run_tests.py -vr2 -t1 -k "torch" -a "$env:TI_WANTED_ARCHS" -if (-not $?) { exit 1 } diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 6ac9cd1927c7a..555662ffa9cf8 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -40,9 +40,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: - fetch-depth: 2 + fetch-depth: '0' - name: check modified files id: check_files @@ -83,9 +83,10 @@ jobs: runs-on: ubuntu-latest needs: check_files steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: 'recursive' + fetch-depth: '0' - name: clang-tidy run: | @@ -94,8 +95,8 @@ jobs: fi # https://docs.github.com/en/packages/managing-github-packages-using-github-actions-workflows/publishing-and-installing-a-package-with-github-actions#upgrading-a-workflow-that-accesses-ghcrio echo $CR_PAT | docker login ghcr.io -u ${{ github.actor }} --password-stdin - docker pull ghcr.io/taichi-dev/taichidev-cpu-ubuntu18.04:v0.3.2 - docker run -id --user dev --name check_clang_tidy ghcr.io/taichi-dev/taichidev-cpu-ubuntu18.04:v0.3.2 /bin/bash + docker pull ghcr.io/taichi-dev/taichidev-cpu-ubuntu18.04:v0.3.3 + docker run -id --user dev --name check_clang_tidy ghcr.io/taichi-dev/taichidev-cpu-ubuntu18.04:v0.3.3 /bin/bash tar -cf - ../${{ github.event.repository.name }} --mode u=+rwx,g=+rwx,o=+rwx --owner 1000 --group 1000 | docker cp - check_clang_tidy:/home/dev/ docker exec --user root check_clang_tidy apt install -y clang-tidy-10 docker exec --user dev check_clang_tidy /home/dev/taichi/.github/workflows/scripts/check_clang_tidy.sh "$CI_SETUP_CMAKE_ARGS" @@ -106,7 +107,7 @@ jobs: build_and_test_cpu_linux: name: Build and Test linux (CPU) needs: check_files - timeout-minutes: ${{ github.event.schedule != '0 18 * * *' && 60 || 120 }} + timeout-minutes: ${{ github.event.schedule != '0 18 * * *' && 90 || 120 }} strategy: matrix: include: @@ -123,89 +124,69 @@ jobs: packages: read contents: read steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: + fetch-depth: '0' submodules: 'recursive' - - name: Get sccache cache + - name: Get Build Cache uses: actions/cache@v2 with: - path: sccache_cache - key: sccache-linux-${{matrix.with_cc}}-${{ github.sha }} + path: /tmp/build-cache + key: build-cache-linux-${{ github.sha }} restore-keys: | - sccache-linux-${{matrix.with_cc}}- + build-cache-linux- - - name: Get docker images + - name: Prepare Environment + run: | + . 
.github/workflows/scripts/common-utils.sh + prepare-build-cache /tmp/build-cache + echo CI_DOCKER_RUN_EXTRA_ARGS="-v $(pwd):/home/dev/taichi" >> $GITHUB_ENV + sudo -n chown -R 1000:1000 . || true + + - name: Get Docker Images + if: needs.check_files.outputs.run_job == 'true' run: | - if [[ ${{needs.check_files.outputs.run_job}} == false ]]; then - exit 0 - fi # https://docs.github.com/en/packages/managing-github-packages-using-github-actions-workflows/publishing-and-installing-a-package-with-github-actions#upgrading-a-workflow-that-accesses-ghcrio - echo $CR_PAT | docker login ghcr.io -u ${{ github.actor }} --password-stdin - docker pull ghcr.io/taichi-dev/taichidev-cpu-ubuntu18.04:v0.3.2 - env: - CR_PAT: ${{ secrets.GITHUB_TOKEN }} + docker login ghcr.io -u ${{ github.actor }} -p ${{ secrets.GITHUB_TOKEN }} + docker pull ghcr.io/taichi-dev/taichidev-cpu-ubuntu18.04:v0.3.3 - name: Build + if: needs.check_files.outputs.run_job == 'true' run: | - if [[ ${{needs.check_files.outputs.run_job}} == false ]]; then - exit 0 - fi - mkdir -m777 shared - docker create --user dev --name taichi_build \ - -e TI_LITE_TEST \ - -e TI_TEST_OFFLINE_CACHE \ - -e PY -e PROJECT_NAME -e TAICHI_CMAKE_ARGS \ - ghcr.io/taichi-dev/taichidev-cpu-ubuntu18.04:v0.3.2 \ - /home/dev/taichi/.github/workflows/scripts/unix_build.sh - # A tarball is needed because sccache needs some permissions that only the file owner has. - # 1000 is the uid and gid of user "dev" in the container. - # If the uid or gid of the user inside the docker changes, please change the uid and gid in the following line. - tar -cf - ../${{ github.event.repository.name }} --mode u=+rwx,g=+rwx,o=+rwx --owner 1000 --group 1000 | docker cp - taichi_build:/home/dev/ - docker start -a taichi_build - rm -rf sccache_cache - docker cp taichi_build:/home/dev/taichi/sccache_cache sccache_cache - docker cp taichi_build:/home/dev/taichi/dist shared/dist - docker cp taichi_build:/home/dev/taichi/build shared/build + . .github/workflows/scripts/common-utils.sh + ci-docker-run \ + ghcr.io/taichi-dev/taichidev-cpu-ubuntu18.04:v0.3.3 \ + /home/dev/taichi/.github/workflows/scripts/unix-build.sh + env: PY: ${{ matrix.python }} PROJECT_NAME: taichi - TAICHI_CMAKE_ARGS: -DTI_WITH_OPENGL:BOOL=OFF -DTI_WITH_CC:BOOL=${{ matrix.with_cc }} -DTI_WITH_VULKAN:BOOL=OFF -DTI_BUILD_TESTS:BOOL=ON -DTI_WITH_C_API=ON -DCMAKE_C_COMPILER_LAUNCHER=sccache -DCMAKE_CXX_COMPILER_LAUNCHER=sccache + TAICHI_CMAKE_ARGS: >- + -DTI_WITH_OPENGL:BOOL=OFF + -DTI_WITH_CC:BOOL=${{ matrix.with_cc }} + -DTI_WITH_VULKAN:BOOL=OFF + -DTI_BUILD_TESTS:BOOL=ON + -DTI_WITH_C_API=ON - name: Test id: test + if: needs.check_files.outputs.run_job == 'true' run: | - if [[ ${{needs.check_files.outputs.run_job}} == false ]]; then - exit 0 - fi - docker create --user dev --name taichi_test \ - -e PY -e TI_WANTED_ARCHS \ - -e TI_LITE_TEST \ - -e TI_TEST_OFFLINE_CACHE \ - ghcr.io/taichi-dev/taichidev-cpu-ubuntu18.04:v0.3.2 /home/dev/unix_test.sh - docker cp .github/workflows/scripts/unix_test.sh taichi_test:/home/dev/unix_test.sh - docker cp .github/workflows/scripts/common-utils.sh taichi_test:/home/dev/common-utils.sh - docker cp shared/dist/ taichi_test:/home/dev/ - docker cp shared/build/ taichi_test:/home/dev/ - docker cp ./requirements_test.txt taichi_test:/home/dev/requirements_test.txt - docker cp tests/ taichi_test:/home/dev/ - docker cp pyproject.toml taichi_test:/home/dev/ - docker start -a taichi_test + . 
.github/workflows/scripts/common-utils.sh + ci-docker-run \ + ghcr.io/taichi-dev/taichidev-cpu-ubuntu18.04:v0.3.3 \ + /home/dev/taichi/.github/workflows/scripts/unix_test.sh env: PY: ${{ matrix.python }} TI_WANTED_ARCHS: ${{ matrix.wanted_archs }} - - name: clean docker container - if: always() - run: | - docker rm taichi_build taichi_test -f - - name: Save wheel if test failed if: failure() && steps.test.conclusion == 'failure' uses: actions/upload-artifact@v3 with: name: broken-wheel - path: shared/dist/* + path: dist/* retention-days: 7 build_and_test_cpu_mac: @@ -226,44 +207,32 @@ jobs: env: PY: ${{ matrix.python }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: + fetch-depth: '0' submodules: 'recursive' - - name: Get sccache cache - uses: actions/cache@v2 - with: - path: sccache_cache - key: sccache-mac-${{ github.sha }} - restore-keys: | - sccache-mac- - - - name: Setup Python PATH && Download Pre-Built LLVM 10.0.0 + - name: Prepare Environment run: | - export PATH=`pwd`/taichi-llvm/bin/:$PATH - # miniconda / miniforge - export PATH=$(ls -d ~/mini*/envs/$PY/bin):$PATH - if [[ "${{needs.check_files.outputs.run_job}}" == "false" ]]; then - exit 0 - fi + . .github/workflows/scripts/common-utils.sh + prepare-build-cache python misc/ci_download.py - echo PATH=$PATH >> $GITHUB_ENV - # env: CI_PLATFORM: macos - name: Build & Install + if: needs.check_files.outputs.run_job == 'true' run: | - if [[ "${{needs.check_files.outputs.run_job}}" == "false" ]]; then - exit 0 - fi brew install molten-vk - mkdir -p sccache_cache - .github/workflows/scripts/unix_build.sh - brew uninstall molten-vk + .github/workflows/scripts/unix-build.sh env: - TAICHI_CMAKE_ARGS: -DTI_WITH_OPENGL:BOOL=OFF -DTI_WITH_CC:BOOL=${{ matrix.with_cc }} -DTI_WITH_VULKAN:BOOL=ON -DTI_WITH_C_API=ON -DTI_BUILD_TESTS:BOOL=${{ matrix.with_cpp_tests }} -DCMAKE_C_COMPILER_LAUNCHER=sccache -DCMAKE_CXX_COMPILER_LAUNCHER=sccache CXX: clang++ + TAICHI_CMAKE_ARGS: >- + -DTI_WITH_OPENGL:BOOL=OFF + -DTI_WITH_CC:BOOL=${{ matrix.with_cc }} + -DTI_WITH_VULKAN:BOOL=ON + -DTI_WITH_C_API=ON + -DTI_BUILD_TESTS:BOOL=${{ matrix.with_cpp_tests }} # [DEBUG] Copy this step around to enable debugging inside Github Action instances. 
#- name: Setup tmate session @@ -273,11 +242,8 @@ jobs: - name: Test id: test - run: | - if [[ "${{needs.check_files.outputs.run_job}}" == "false" ]]; then - exit 0 - fi - .github/workflows/scripts/unix_test.sh + if: needs.check_files.outputs.run_job == 'true' + run: .github/workflows/scripts/unix_test.sh env: TI_WANTED_ARCHS: ${{ matrix.wanted_archs }} TI_SKIP_CPP_TESTS: Disabled because Vulkan is supported but not working on buildbot4 @@ -305,59 +271,68 @@ jobs: permissions: packages: read contents: read + env: + PY: "3.7" steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: + fetch-depth: '0' submodules: 'recursive' - - name: Get sccache cache - uses: actions/cache@v2 + - name: Get Build Cache + uses: actions/cache@v3 if: needs.check_files.outputs.run_job == 'true' with: - path: ccache_cache - key: ccache-win64-cpu-${{ github.sha }} + path: ${{ env.LocalAppData }}/build-cache + key: build-cache-win64-cpu-${{ github.sha }} restore-keys: | - ccache-win64-cpu- + build-cache-win64-cpu- - - name: Get docker images - shell: bash + - name: Prepare Environment + shell: pwsh if: needs.check_files.outputs.run_job == 'true' run: | - echo $CR_PAT | docker login ghcr.io -u ${{ github.actor }} --password-stdin - docker pull ghcr.io/taichi-dev/taichidev-cpu-windows:v0.0.1 - env: - CR_PAT: ${{ secrets.GITHUB_TOKEN }} + . .github/workflows/scripts/common-utils.ps1 + Invoke docker login ghcr.io -u ${{ github.actor }} -p ${{ secrets.GITHUB_TOKEN }} + Invoke docker pull ghcr.io/taichi-dev/taichidev-cpu-windows:v0.0.2 + PrepareBuildCache + echo CI_DOCKER_RUN_EXTRA_ARGS="--entrypoint pwsh.exe -v ${pwd}:D: -w D:/" >> $env:GITHUB_ENV - # TODO: split build and test - - name: Build and Test - shell: bash + - name: Build + shell: pwsh + if: needs.check_files.outputs.run_job == 'true' run: | - if [[ ${{needs.check_files.outputs.run_job}} == false ]]; then - exit 0 - fi - docker create --name taichi_build_test \ - -e TI_LITE_TEST \ - -e TI_TEST_OFFLINE_CACHE \ - ghcr.io/taichi-dev/taichidev-cpu-windows:v0.0.1 \ - C:/taichi/.github/workflows/scripts/win_build_test_cpu.ps1 -llvmVer ${{ matrix.llvmVer }} - tar -cf - ../${{ github.event.repository.name }} --mode u=+rwx,g=+rwx,o=+rwx | docker cp - taichi_build_test:C:/ - docker start -a taichi_build_test - rm -rf ccache_cache - docker cp taichi_build_test:C:/taichi/ccache_cache ccache_cache - - - name: clean docker container - shell: bash - if: always() + . .github/workflows/scripts/common-utils.ps1 + CIDockerRun ` + ghcr.io/taichi-dev/taichidev-cpu-windows:v0.0.2 ` + .github/workflows/scripts/win_build.ps1 -llvmVer ${{ matrix.llvmVer }} -libsDir X:/ + env: + TAICHI_CMAKE_ARGS: >- + -DTI_WITH_OPENGL:BOOL=OFF + -DTI_WITH_CC:BOOL=OFF + -DTI_WITH_VULKAN:BOOL=OFF + -DTI_WITH_CUDA:BOOL=OFF + -DTI_BUILD_TESTS:BOOL=OFF + -DTI_WITH_DX12:BOOL=ON + + - name: Test + shell: pwsh + if: needs.check_files.outputs.run_job == 'true' run: | - docker rm taichi_build_test -f + . 
+
+      - name: Test
+        id: test
+        shell: pwsh
+        if: needs.check_files.outputs.run_job == 'true'
         run: |
-          docker rm taichi_build_test -f
+          . .github/workflows/scripts/common-utils.ps1
+          CIDockerRun `
+            ghcr.io/taichi-dev/taichidev-cpu-windows:v0.0.2 `
+            .github/workflows/scripts/win_test.ps1 -libsDir X:/
+        env:
+          TI_WANTED_ARCHS: cpu

-      # - name: Save wheel if test failed
-      #   if: failure() && steps.build_and_test.conclusion == 'failure'
-      #   uses: actions/upload-artifact@v3
-      #   with:
-      #     name: broken-wheel
-      #     path: dist/*
-      #     retention-days: 7
+      - name: Save wheel if test failed
+        if: failure() && steps.test.conclusion == 'failure'
+        uses: actions/upload-artifact@v3
+        with:
+          name: broken-wheel
+          path: dist/*
+          retention-days: 7

   build_and_test_gpu_linux:
     name: Build and Test (GPU)
@@ -371,108 +346,57 @@ jobs:
     runs-on: ${{ matrix.tags }}
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           submodules: 'recursive'
+          fetch-depth: '0'

-      - name: Get sccache cache
-        uses: actions/cache@v2
-        with:
-          path: sccache_cache
-          key: sccache-linux-gpu-${{ github.sha }}
-          restore-keys: |
-            sccache-linux-gpu-
+      - name: Prepare Environment
+        run: |
+          . .github/workflows/scripts/common-utils.sh
+          prepare-build-cache
+          echo CI_DOCKER_RUN_EXTRA_ARGS="-v $(pwd):/home/dev/taichi" >> $GITHUB_ENV
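
`ci-docker-run-gpu`, used by the GPU jobs, stands in for the removed inline sequence that probed X displays with `xset`, mounted `/tmp/.X11-unix`, and passed `--gpus 'all,"capabilities=graphics,utility,display,video,compute"'` to `docker create`. A sketch of what such a variant might look like on top of the hypothetical `ci-docker-run` above; only the flags visible in the removed commands are taken from this diff, the rest is assumption:

    #!/usr/bin/env bash
    # Hypothetical sketch of ci-docker-run-gpu (the real helper is in common-utils.sh).
    ci-docker-run-gpu() {
        # Locate a live X display, as the removed inline script did with xset.
        local display=""
        for i in {0..9}; do
            if xset -display ":$i" -q >/dev/null 2>&1; then
                display=":$i"
                break
            fi
        done
        if [ -z "$display" ]; then
            echo "No display!" >&2
            return 1
        fi

        # Same GPU and display plumbing the old docker create lines set up by hand.
        ci-docker-run \
            --gpus 'all,"capabilities=graphics,utility,display,video,compute"' \
            -v /tmp/.X11-unix:/tmp/.X11-unix \
            -e DISPLAY="$display" \
            "$@"
    }
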

       - name: Build & Install
         run: |
-          if [[ ${{needs.check_files.outputs.run_job}} == false ]]; then
-            exit 0
-          fi
-          mkdir -m777 shared
-          for i in {0..9}; do
-            if xset -display ":$i" -q >/dev/null 2>&1; then
-              break
-            fi
-          done
-          if [ $? -ne 0 ]; then
-            echo "No display!"
-            exit 1
-          fi
-          export DISPLAY=:$i
-          docker create --user dev --name taichi_test \
-            --gpus 'all,"capabilities=graphics,utility,display,video,compute"' \
-            -v /tmp/.X11-unix:/tmp/.X11-unix \
-            -e TI_LITE_TEST \
-            -e TI_TEST_OFFLINE_CACHE \
-            -e DISPLAY -e PY -e GPU_TEST -e TI_WANTED_ARCHS -e TI_RUN_RELEASE_TESTS \
+          [[ ${{needs.check_files.outputs.run_job}} == false ]] && exit 0
+          . .github/workflows/scripts/common-utils.sh
+
+          ci-docker-run-gpu --name taichi-test \
             registry.taichigraphics.com/taichidev-ubuntu18.04:v0.3.4 \
-            /home/dev/unix_test.sh
-          docker cp .github/workflows/scripts/unix_test.sh taichi_test:/home/dev/unix_test.sh
-          docker cp .github/workflows/scripts/common-utils.sh taichi_test:/home/dev/common-utils.sh
-          docker cp shared/dist/ taichi_test:/home/dev/
-          docker cp shared/build/ taichi_test:/home/dev/
-          docker cp pyproject.toml taichi_test:/home/dev/
-          docker cp tests/ taichi_test:/home/dev/
-          docker cp requirements_test.txt taichi_test:/home/dev/requirements_test.txt
-          docker start -a taichi_test
+            /home/dev/taichi/.github/workflows/scripts/unix_test.sh
         env:
           PY: py38
-          GPU_TEST: ON
           TI_WANTED_ARCHS: 'cuda,vulkan,opengl'
           TI_DEVICE_MEMORY_GB: '0.7'
           TI_RUN_RELEASE_TESTS: '1'
-
-      - name: clean docker container
-        if: always()
-        run: |
-          docker rm taichi_build taichi_test -f
-
       - name: Save wheel if test failed
         if: failure() && steps.test.conclusion == 'failure'
         uses: actions/upload-artifact@v3
         with:
           name: broken-wheel
-          path: shared/dist/*
+          path: dist/*
           retention-days: 7

   build_and_test_windows:
@@ -492,52 +416,39 @@ jobs:
     runs-on: ${{ matrix.runsOn }}
     timeout-minutes: ${{ github.event.schedule != '0 18 * * *' && 90 || 180 }}
     steps:
-      # See also https://github.com/taichi-dev/taichi/issues/4161
-      - name: Cleanup
-        shell: pwsh
-        run: |
-          remove-item '${{ github.workspace }}\*' -recurse -force
-
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
+          fetch-depth: '0'
           submodules: 'recursive'

       - uses: actions/setup-python@v2
         with:
           python-version: 3.7

-      - name: Add Visual Studio Shell to ENV
-        uses: egor-tensin/vs-shell@v2
-        with:
-          arch: x64
-
-      - name: Get sccache cache
-        uses: actions/cache@v2
-        with:
-          path: ccache_cache
-          key: ccache-win64-${{ github.sha }}
-          restore-keys: |
-            ccache-win64-
-
       - name: Build
         shell: pwsh
         if: ${{ needs.check_files.outputs.run_job != 'false' }}
         run: |
-          .\.github\workflows\scripts\win_build.ps1 -llvmVer ${{ matrix.llvmVer }} -installVulkan -install -libsDir C:\
+          .\.github\workflows\scripts\win_build.ps1 -llvmVer ${{ matrix.llvmVer }} -installVulkan -libsDir "$env:LocalAppData/buildbot"
         env:
-          TAICHI_CMAKE_ARGS: -DTI_WITH_OPENGL:BOOL=ON -DTI_WITH_DX11:BOOL=ON -DTI_WITH_CC:BOOL=OFF -DTI_WITH_C_API=ON -DTI_BUILD_TESTS:BOOL=ON
+          PY: "3.7"
+          TAICHI_CMAKE_ARGS: >-
+            -DTI_WITH_OPENGL:BOOL=ON
+            -DTI_WITH_DX11:BOOL=ON
+            -DTI_WITH_CC:BOOL=OFF
+            -DTI_BUILD_TESTS:BOOL=ON
+            -DTI_WITH_C_API=ON

       - name: Test
         id: test
         shell: pwsh
         if: ${{ needs.check_files.outputs.run_job != 'false' }}
         run: |
-          .\.github\workflows\scripts\win_test.ps1
+          .\.github\workflows\scripts\win_test.ps1 -libsDir "$env:LocalAppData/buildbot"
         env:
+          PY: "3.7"
           TI_WANTED_ARCHS: ${{ matrix.archs }}
           TI_SKIP_VERSION_CHECK: ON
-          TI_CI: 1
-          PYTHON: '3.7'
           TI_DEVICE_MEMORY_GB: '0.7'
           TI_RUN_RELEASE_TESTS: '1'
@@ -564,44 +475,38 @@ jobs:
         shell: '/usr/bin/arch -arch arm64e /bin/bash --noprofile --norc -eo pipefail {0}'
     runs-on: [self-hosted, m1]
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
+          fetch-depth: '0'
           submodules: 'recursive'

-      - name: Get sccache cache
-        uses: actions/cache@v2
-        with:
-          path: sccache_cache
-          key: sccache-m1-${{ github.sha }}
-          restore-keys: |
-            sccache-m1-
-
       - name: Build
         run: |
-          if [[ ${{needs.check_files.outputs.run_job}} == false ]]; then
-            exit 0
-          fi
-          export PATH=/Users/github/miniforge3/envs/$PY/bin:$PATH
+          [[ ${{needs.check_files.outputs.run_job}} == false ]] && exit 0
+          . .github/workflows/scripts/common-utils.sh
+          prepare-build-cache
           brew install molten-vk
-          .github/workflows/scripts/unix_build.sh
+          .github/workflows/scripts/unix-build.sh
         env:
-          TAICHI_CMAKE_ARGS: -DTI_WITH_OPENGL:BOOL=OFF -DTI_WITH_CUDA:BOOL=OFF -DTI_WITH_CC:BOOL=OFF -DTI_WITH_VULKAN:BOOL=ON -DTI_WITH_C_API=ON -DTI_BUILD_TESTS:BOOL=ON -DCMAKE_C_COMPILER_LAUNCHER=sccache -DCMAKE_CXX_COMPILER_LAUNCHER=sccache
           PY: ${{ matrix.python }}
           CXX: clang++
+          TAICHI_CMAKE_ARGS: >-
+            -DTI_WITH_OPENGL:BOOL=OFF
+            -DTI_WITH_CUDA:BOOL=OFF
+            -DTI_WITH_CC:BOOL=OFF
+            -DTI_WITH_VULKAN:BOOL=ON
+            -DTI_BUILD_TESTS:BOOL=ON
+            -DTI_WITH_C_API=ON

       - name: Test
         id: test
         run: |
-          if [[ ${{needs.check_files.outputs.run_job}} == false ]]; then
-            exit 0
-          fi
-          export PATH=/Users/github/miniforge3/envs/$PY/bin:$PATH
+          [[ ${{needs.check_files.outputs.run_job}} == false ]] && exit 0
           .github/workflows/scripts/unix_test.sh
         env:
           TI_WANTED_ARCHS: 'cpu,metal,vulkan'
           PY: ${{ matrix.python }}
           PLATFORM: 'm1'
-          TI_CI: 1
           TI_RUN_RELEASE_TESTS: '1'

       - name: Save wheel if test failed
@@ -626,101 +531,84 @@ jobs:
       REDIS_HOST: 172.16.5.8
       PY: py39
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         name: Checkout taichi
         with:
+          fetch-depth: '0'
           submodules: "recursive"

       - name: Prepare Environment
         run: >-
+          . .github/workflows/scripts/common-utils.sh;
+          prepare-build-cache;
+          TAICHI_WHEEL_DIR=$(mktemp -d); echo TAICHI_WHEEL_DIR=$TAICHI_WHEEL_DIR >> $GITHUB_ENV; chmod 0777 $TAICHI_WHEEL_DIR;

-          echo DOCKER_RUN_ARGS="
-          -e PY
+          echo CI_DOCKER_RUN_EXTRA_ARGS="
           -e REDIS_HOST
-          -e TI_LITE_TEST
-          -e TAICHI_CMAKE_ARGS
-          -e SCCACHE_ROOT=/var/lib/sccache
           -v $(pwd):/home/dev/taichi
-          -v $(readlink -f ~/sccache):/var/lib/sccache
-          -v $(readlink -f ~/git-cache):/var/lib/git-cache
-          -v $(readlink -f ~/build-cache)/pip:/home/dev/.cache/pip
-          -v $(readlink -f ~/build-cache)/dot-gradle:/home/dev/.gradle
           -v $TAICHI_WHEEL_DIR:/taichi-wheel
           " >> $GITHUB_ENV;

-          for i in {0..9}; do if xset -display ":$i" -q >/dev/null 2>&1; then break; fi; done;
-          if [ $? -ne 0 ]; then echo "No display!"; exit 1; fi;
-
-          echo DOCKER_RUN_GPU_ARGS="
-          --runtime=nvidia
-          -e DISPLAY=:$i
-          -e NVIDIA_VISIBLE_DEVICES=all
-          -e NVIDIA_DRIVER_CAPABILITIES=all
-          " >> $GITHUB_ENV;
-
-          mkdir -p ~/build-cache/pip; chmod 0777 ~/build-cache/pip;
-          mkdir -p ~/build-cache/dot-gradle; chmod 0777 ~/build-cache/dot-gradle;
-          mkdir -p ~/build-cache/pip; chmod 0777 ~/build-cache/pip;
-
       - name: Build Host Taichi
         run: |
-          docker rm -f taichi_build_host
-          docker run --user dev --name taichi_build_host \
-            $DOCKER_RUN_ARGS \
+          . .github/workflows/scripts/common-utils.sh
+          ci-docker-run --name taichi-build-host \
            -v $TAICHI_WHEEL_DIR:/home/dev/taichi/dist \
             registry.taichigraphics.com/taichidev-ubuntu18.04:v0.3.4 \
-            /home/dev/taichi/.github/workflows/scripts/unix-build-v2.sh
+            /home/dev/taichi/.github/workflows/scripts/unix-build.sh
         env:
           TAICHI_CMAKE_ARGS: >-
             -DTI_WITH_OPENGL:BOOL=ON
             -DTI_WITH_CC:BOOL=OFF
             -DTI_WITH_VULKAN:BOOL=ON
-            -DTI_WITH_C_API=ON
-            -DCMAKE_C_COMPILER_LAUNCHER=sccache
-            -DCMAKE_CXX_COMPILER_LAUNCHER=sccache
+            -DTI_WITH_C_API=OFF
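
One detail worth noting in this job's `Prepare Environment` step: because it uses a folded block scalar (`run: >-`), YAML joins the wrapped continuation lines with spaces before bash runs them, so the multi-line-looking `CI_DOCKER_RUN_EXTRA_ARGS` value reaches `$GITHUB_ENV` as a single line (a genuinely multi-line value would need the delimiter syntax instead). Assuming the continuation lines sit at the same indentation, the export boils down to roughly:

    # Approximate single command produced by the folded run block above:
    echo CI_DOCKER_RUN_EXTRA_ARGS=" -e REDIS_HOST -v $(pwd):/home/dev/taichi -v $TAICHI_WHEEL_DIR:/taichi-wheel " >> $GITHUB_ENV

Every later `ci-docker-run`/`ci-docker-run-gpu` call in this job then inherits the Redis endpoint, the source checkout, and the wheel directory without repeating those mounts.
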
       - name: Build For Android
         run: |
+          . .github/workflows/scripts/common-utils.sh
           git clean -fxd
-          docker rm taichi_build_android -f
           chown -R 1000:1000 .
-          docker run -i --user dev --name taichi_build_android \
-            $DOCKER_RUN_ARGS \
+          ci-docker-run --name taichi-build-android \
             registry.taichigraphics.com/taichidev-androidsdk18.04:v0.0.6 \
-            /home/dev/taichi/.github/workflows/scripts/android-build.sh build-ext
+            /home/dev/taichi/.github/workflows/scripts/android-build.sh
+        env:
+          TAICHI_CMAKE_ARGS: >-
+            -DTI_WITH_OPENGL:BOOL=ON
+            -DTI_WITH_CC:BOOL=OFF
+            -DTI_WITH_VULKAN:BOOL=ON
+            -DTI_WITH_LLVM:BOOL=OFF
+            -DTI_WITH_C_API=ON

       - name: Test For Android AOT (export core)
         run: |
-          docker rm taichi_test_android -f
+          . .github/workflows/scripts/common-utils.sh
           chown -R 1000:1000 .
-          docker run -i --user dev --name taichi_test_android \
-            $DOCKER_RUN_ARGS \
-            $DOCKER_RUN_GPU_ARGS \
+          ci-docker-run-gpu --name taichi-test-android \
             registry.taichigraphics.com/taichidev-androidsdk18.04:v0.0.6 \
             /home/dev/taichi/.github/workflows/scripts/aot-demo.sh build-and-smoke-test-android-aot-demo

       - name: Prepare Unity Build Environment
         run: |
+          . .github/workflows/scripts/common-utils.sh
           chown -R 1000:1000 .
-          docker run -i --rm --user dev \
-            $DOCKER_RUN_ARGS \
+          ci-docker-run \
             registry.taichigraphics.com/taichidev-androidsdk18.04:v0.0.6 \
             /home/dev/taichi/.github/workflows/scripts/aot-demo.sh prepare-unity-build-env

       - name: Build Taichi-UnityExample
         run: |
-          docker run -i --rm --user dev \
-            $DOCKER_RUN_ARGS \
+          . .github/workflows/scripts/common-utils.sh
+          ci-docker-run \
             registry.taichigraphics.com/unityci-editor:ubuntu-2020.3.14f1-android-1-with-secret-sauce \
             /home/dev/taichi/.github/workflows/scripts/aot-demo.sh build-unity-demo

       - name: Run Taichi-UnityExample (C-API)
         run: |
+          . .github/workflows/scripts/common-utils.sh
           chown -R 1000:1000 .
-          docker run -i --rm --user dev \
-            $DOCKER_RUN_ARGS \
+          ci-docker-run \
             registry.taichigraphics.com/taichidev-androidsdk18.04:v0.0.6 \
             /home/dev/taichi/.github/workflows/scripts/aot-demo.sh smoke-test-unity-demo
diff --git a/ci/Dockerfile.ubuntu.18.04.cpu b/ci/Dockerfile.ubuntu.18.04.cpu
index 6c545669d22c7..31222f2c0f0a7 100644
--- a/ci/Dockerfile.ubuntu.18.04.cpu
+++ b/ci/Dockerfile.ubuntu.18.04.cpu
@@ -11,12 +11,14 @@ RUN apt-get update && \
     apt-get install -y python3-pip \
                        libtinfo-dev \
                        clang-10 \
+                       sudo \
                        wget \
                        git \
                        unzip \
                        libx11-xcb-dev \
                        zlib1g-dev \
                        && \
+    printf "root ALL=(ALL:ALL) NOPASSWD: ALL\ndev ALL=(ALL:ALL) NOPASSWD: ALL\n" > /etc/sudoers && \
     apt-get autoclean && \
     rm -rf /var/cache/apt/archives
diff --git a/ci/windows/Dockerfile_cpu b/ci/windows/Dockerfile_cpu
index 8a62212ce12d4..15f031ee106ed 100644
--- a/ci/windows/Dockerfile_cpu
+++ b/ci/windows/Dockerfile_cpu
@@ -31,5 +31,8 @@ RUN conda create -n py37 python=3.7
 # TODO: replace this when merge into ci
 COPY win_build_test.ps1 C:\win_build_test.ps1
 
+# Powershell 7
+RUN choco install -y pwsh
+
 # MUST use x64 as entry point
 ENTRYPOINT ["C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools\\VC\\Auxiliary\\build\\vcvars64.bat", "&&", "powershell.exe", "-NoLogo", "-ExecutionPolicy", "Bypass"]