# Workflow file for run #199 — "Use new blockwise unpack collection in array (#1173)"

name: Conda build

on:
  push:
    branches:
      - main
    tags:
      - "*"
  pull_request:
    paths:
      - setup.py
      - ci/recipe/**
      - .github/workflows/conda.yml
      - pyproject.toml

# When this workflow is queued, automatically cancel any previous running
# or pending jobs from the same branch
concurrency:
  group: conda-${{ github.ref }}
  cancel-in-progress: true

# Required shell entrypoint to have properly activated conda environments
defaults:
  run:
    shell: bash -l {0}

jobs:
  conda:
    name: Build (and upload)
    runs-on: ubuntu-latest
    steps:
      # NOTE(review): the action version pins were garbled in the pasted source
      # ("[email protected]"); v4.1.1 / v3.0.1 are the pins used by the sibling
      # dask workflows — confirm against the repository.
      - uses: actions/checkout@v4.1.1
        with:
          # Full history so the build can derive a version from git tags
          fetch-depth: 0
      - name: Set up Python
        uses: conda-incubator/setup-miniconda@v3.0.1
        with:
          miniforge-version: latest
          use-mamba: true
          # Quoted so YAML never reads a version as a float (e.g. 3.10 -> 3.1)
          python-version: "3.9"
          channel-priority: strict
      - name: Install dependencies
        run: |
          mamba install -c conda-forge boa conda-verify
          which python
          pip list
          mamba list
      - name: Build conda packages
        run: |
          # suffix for pre-release package versions
          export VERSION_SUFFIX=a`date +%y%m%d`
          # conda search for the latest dask-core pre-release
          arr=($(conda search --override-channels -c dask/label/dev dask-core | tail -n 1))
          # extract dask-core pre-release version / build
          export DASK_CORE_VERSION=${arr[1]}
          # distributed pre-release build
          conda mambabuild ci/recipe \
            --channel dask/label/dev \
            --no-anaconda-upload \
            --output-folder .
      - name: Upload conda packages
        # Only upload from pushes to the canonical repo, never from forks/PRs
        if: github.event_name == 'push' && github.repository == 'dask/dask-expr'
        env:
          ANACONDA_API_TOKEN: ${{ secrets.DASK_CONDA_TOKEN }}
        run: |
          # install anaconda for upload
          mamba install -c conda-forge anaconda-client
          anaconda upload --label dev noarch/*.tar.bz2