# Workflow file for run: "Remove Dask and Spark DataFrame Support" (#2863)
name: Install Test
# NOTE: `on` is a YAML 1.1 truthy key; GitHub's loader handles it — suppress
# yamllint `truthy` here rather than quoting it.
on:
  pull_request:
    types: [opened, synchronize]
  push:
    branches:
      - main
env:
  # Disable the Alteryx open-source update checker during CI.
  # Quoted deliberately: the previous unquoted `False` was YAML-typed as a
  # boolean and delivered to the job as the string "false" anyway — writing
  # "false" makes the runtime value explicit without changing it.
  ALTERYX_OPEN_SRC_UPDATE_CHECKER: "false"
jobs:
  # Installs the sdist with the [complete] extra on every OS/Python
  # combination and smoke-tests the optional-dependency imports.
  install_ft_complete:
    name: ${{ matrix.os }} - ${{ matrix.python_version }} install featuretools complete
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
        python_version: ["3.9", "3.10", "3.11", "3.12"]
    runs-on: ${{ matrix.os }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          # Check out the PR head (fork-aware) rather than the merge ref.
          ref: ${{ github.event.pull_request.head.ref }}
          repository: ${{ github.event.pull_request.head.repo.full_name }}
      - name: Set up python ${{ matrix.python_version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python_version }}
          cache: 'pip'
          cache-dependency-path: 'pyproject.toml'
      - uses: actions/cache@v3
        id: cache
        with:
          path: ${{ env.pythonLocation }}
          # BUG FIX: key previously used `${{ matrix.os- }}` — `os-` is not a
          # valid matrix property, so the segment expanded empty and all three
          # OSes collided on one cache key. The hyphen belongs outside the
          # expression.
          key: ${{ matrix.os }}-${{ matrix.python_version }}-install-${{ env.pythonLocation }}-${{ hashFiles('**/pyproject.toml') }}-v01
      - name: Build featuretools package
        run: |
          make package
      - name: Install complete version of featuretools from sdist (not using cache)
        if: steps.cache.outputs.cache-hit != 'true'
        run: |
          python -m pip install "unpacked_sdist/[complete]"
      - name: Install complete version of featuretools from sdist (using cache)
        if: steps.cache.outputs.cache-hit == 'true'
        run: |
          python -m pip install "unpacked_sdist/[complete]" --no-deps
      - name: Test by importing packages
        run: |
          python -c "import alteryx_open_src_update_checker"
          python -c "from featuretools_sql import DBConnector"
      - name: Check package conflicts
        run: |
          python -m pip check
      - name: Verify extra_requires commands
        run: |
          python -m pip install "unpacked_sdist/[nlp,updater,sql]"