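# Deploys Bigeye virtual tables: on every push to main, each changed YAML file
# under virtual_tables/ is upserted through the Bigeye CLI.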
name: Deploy virtual tables
on:
  push:
    branches: [ main ]
env:
  BIGEYE_CREDENTIALS: ${{ secrets.BIGEYE_API_CREDENTIALS }}
  BIGEYE_CONFIG: ${{ secrets.BIGEYE_API_CONFIGS }}
jobs:
  deploy_virtual_tables:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: [ "3.9" ]
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Verify changed files
        uses: tj-actions/changed-files@v44
        id: verify-changed-files
        with:
          files: |
            virtual_tables/**.yaml
      - name: Deploy virtual tables
        if: steps.verify-changed-files.outputs.any_changed == 'true'
        env:
          CHANGED_FILES: ${{ steps.verify-changed-files.outputs.all_changed_files }}
        run: |
          python -m pip install --upgrade pip
          python -m pip install bigeye-cli
          # Install yq for reading the virtual table definition files
          sudo wget https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64 -O /usr/bin/yq && sudo chmod +x /usr/bin/yq
          # Write Bigeye CLI configuration and credentials from repository secrets
          echo "$BIGEYE_CONFIG" >> config.ini
          echo "$BIGEYE_CREDENTIALS" >> credentials.ini
          # Upsert every changed virtual table definition
          for file in ${CHANGED_FILES}; do
            echo "Virtual table upsert for ${file}"
            vt_info=$(<"${file}")
            vtn=$(echo "$vt_info" | yq '.table_name')
            sn=$(echo "$vt_info" | yq '.source_name')
            sql=$(echo "$vt_info" | yq '.sql')
            bigeye catalog upsert-virtual-table -vtn "${vtn}" -sn "${sn}" --sql "${sql}"
          done
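# Illustrative sketch of a virtual table definition file this workflow expects
# under virtual_tables/ (field names come from the yq lookups above; the table
# name, source name, and SQL below are hypothetical examples):
#
#   table_name: orders_last_30_days
#   source_name: snowflake_prod
#   sql: |
#     SELECT *
#     FROM analytics.orders
#     WHERE order_date >= CURRENT_DATE - 30
#
# A single definition can also be upserted by hand with the same CLI call used
# in the loop above, e.g.:
#   bigeye catalog upsert-virtual-table -vtn "orders_last_30_days" -sn "snowflake_prod" --sql "SELECT ..."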