lightning speeds on clickhouse cloud (#3051) #7498

Workflow file for this run
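# e2e test workflow: verifies generated Jawn types, tests the costs package
# and its mirrored copies, lints the worker, and runs Python E2E tests against
# locally started Cloudflare workers, Jawn, Supabase, Minio, and ClickHouse.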

name: e2e tests
on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
jobs:
  jawnClient:
    name: Jawn Client
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Make sure types were generated
        working-directory: valhalla/jawn
        run: |
          npm install
          bash tsoa_run.sh
          python3 genTypes.py
          if git status | grep -e "jawnTypes" -e "swagger"; then
            echo "Please run the type generation script to update the types."
            git diff
            exit 1
          fi
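  # Note: the copy check below only verifies the worker mirror
  # (../worker/src/packages/cost); the web copy is not compared.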
  costs:
    name: Cost Calculation Package
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Run node tests
        working-directory: costs
        run: |
          npm install
          npm run test
      - name: Ensure copy was run
        working-directory: costs
        run: |
          # The contents of ./src must be copied to ../worker/src/packages/cost
          # and ../web/packages/cost before pushing. Verify this by hashing the
          # file contents of each directory and comparing the hash lists.
          DIR1=../worker/src/packages/cost
          DIR2=./src
          # Generate hash lists
          find $DIR1 -type f -print0 | sort -z | xargs -0 sha256sum | awk '{print $1}' > /tmp/dir1_hashes_only.txt
          find $DIR2 -type f -print0 | sort -z | xargs -0 sha256sum | awk '{print $1}' > /tmp/dir2_hashes_only.txt
          # Compare the hash lists
          if cmp -s /tmp/dir1_hashes_only.txt /tmp/dir2_hashes_only.txt; then
            echo "Directories have the same contents."
          else
            echo "Directories have different contents."
            echo "Please run the copy script to update the contents of the directories."
            exit 1
          fi
  lint:
    name: Lint
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Lint worker
        run: |
          cd worker
          yarn install
          yarn lint
  tests:
    name: Python E2E tests
    runs-on: ubuntu-latest
    services:
      clickhouse:
        image: yandex/clickhouse-server
        ports:
          - 8123:8123
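    # ClickHouse's HTTP interface is exposed on its default port 8123.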
    steps:
      - uses: actions/checkout@v3
      - uses: supabase/setup-cli@v1
        with:
          version: latest
      - name: Use Node.js
        uses: actions/setup-node@v3
        with:
          node-version: "18"
      - name: Cache npm packages
        uses: actions/cache@v3
        with:
          path: |
            ./worker/node_modules
          key: ${{ runner.os }}-node-${{ hashFiles('./worker/package-lock.json') }}
          restore-keys: |
            ${{ runner.os }}-node-
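      # Cache hits are keyed on the worker lockfile hash; restore-keys lets a
      # previous cache seed npm install when the lockfile changes.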
      - name: Install node dependencies
        run: |
          mkdir -p ~/test-logs
          cd worker
          npm install
      - name: Run Jawn
        working-directory: valhalla/jawn
        env:
          SUPABASE_CREDS: '{"url": "http://localhost:54321", "service_role_key": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6InNlcnZpY2Vfcm9sZSIsImV4cCI6MTk4MzgxMjk5Nn0.EGIM96RAZx35lJzdJsyH-qQwv8Hdp7fsn3W0YpN81IU"}'
          SUPABASE_DATABASE_URL: "postgresql://postgres:postgres@localhost:54322/postgres"
          VERCEL_ENV: "development"
          S3_ENDPOINT: "http://localhost:9000"
          S3_ACCESS_KEY: "minioadmin"
          S3_SECRET_KEY: "minioadmin"
          S3_BUCKET_NAME: "request-response-storage"
        run: |
          yarn
          yarn dev > ~/test-logs/jawn.log &
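      # Jawn starts in the background; the "Wait for Jawn" step below polls
      # http://localhost:8585/healthcheck before the tests run.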
      - name: Start proxy worker
        run: |
          cd worker
          npx wrangler dev --port 8787 --var WORKER_TYPE:OPENAI_PROXY PROMPTARMOR_API_KEY:${{ secrets.PROMPTARMOR_API_KEY }} > ~/test-logs/proxy.log &
        env:
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
      - name: Start Anthropic proxy worker
        run: |
          cd worker
          npx wrangler dev --port 8790 --var WORKER_TYPE:ANTHROPIC_PROXY PROMPTARMOR_API_KEY:${{ secrets.PROMPTARMOR_API_KEY }} > ~/test-logs/anthropic_proxy.log &
        env:
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
      - name: Start async worker
        run: |
          cd worker
          npx wrangler dev --port 8788 --var WORKER_TYPE:HELICONE_API > ~/test-logs/async.log &
        env:
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
      - name: Start gateway worker
        run: |
          cd worker
          npx wrangler dev --port 8789 --var WORKER_TYPE:GATEWAY_API PROMPTARMOR_API_KEY:${{ secrets.PROMPTARMOR_API_KEY }} > ~/test-logs/gateway.log &
        env:
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
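      # Worker ports: 8787 OpenAI proxy, 8788 async (HELICONE_API), 8789
      # gateway, 8790 Anthropic proxy. Each logs to its own file under
      # ~/test-logs, uploaded as the job-logs artifact on failure.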
      - name: Start Minio S3
        run: |
          pip install minio
          python minio_hcone.py --start &
        env:
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
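      # minio_hcone.py serves a local S3-compatible store on localhost:9000,
      # matching the S3_ENDPOINT and minioadmin credentials given to Jawn above.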
      - name: Start Supabase DB & API
        run: supabase start db,api
      - name: Install python dependencies
        run: |
          pip install requests pytest psycopg2 python-dotenv helicone httpx minio
      - name: Wait for Jawn
        run: |
          echo "Waiting for Jawn to be ready..."
          while ! curl -s http://localhost:8585/healthcheck; do
            echo "Waiting for Jawn to be ready..."
            sleep 5
          done
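      # This loop has no timeout of its own; if Jawn never becomes healthy the
      # job runs until the GitHub job-level timeout cancels it.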
      - name: Run integration tests
        working-directory: tests
        env:
          HELICONE_PROXY_URL: "http://127.0.0.1:8787/v1"
          HELICONE_ASYNC_URL: "http://127.0.0.1:8788"
          HELICONE_GATEWAY_URL: "http://127.0.0.1:8789"
          ANTHROPIC_PROXY_URL: "http://127.0.0.1:8790/v1"
          SUPABASE_KEY: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6InNlcnZpY2Vfcm9sZSIsImV4cCI6MTk4MzgxMjk5Nn0.EGIM96RAZx35lJzdJsyH-qQwv8Hdp7fsn3W0YpN81IU"
          SUPABASE_URL: "http://localhost:54321"
          HELICONE_API_KEY: "sk-helicone-aizk36y-5yue2my-qmy5tza-n7x3aqa"
          OPENAI_API_KEY: ${{ secrets.CI_OPENAI_API_KEY }}
          OPENAI_ORG: ${{ secrets.CI_OPENAI_ORG }}
          ANTHROPIC_API_KEY: ${{ secrets.CI_ANTHROPIC_API_KEY }}
        run: |
          # pytest python_integration_tests.py
      - name: Run examples tests
        working-directory: examples
        env:
          HELICONE_PROXY_URL: "http://127.0.0.1:8787/v1"
          HELICONE_ASYNC_URL: "http://127.0.0.1:8788"
          HELICONE_GATEWAY_URL: "http://127.0.0.1:8789"
          ANTHROPIC_PROXY_URL: "http://127.0.0.1:8790/v1"
          SUPABASE_KEY: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6InNlcnZpY2Vfcm9sZSIsImV4cCI6MTk4MzgxMjk5Nn0.EGIM96RAZx35lJzdJsyH-qQwv8Hdp7fsn3W0YpN81IU"
          SUPABASE_URL: "http://localhost:54321"
          HELICONE_API_KEY: "sk-helicone-aizk36y-5yue2my-qmy5tza-n7x3aqa"
          OPENAI_API_KEY: ${{ secrets.CI_OPENAI_API_KEY }}
          OPENAI_ORG: ${{ secrets.CI_OPENAI_ORG }}
          ANTHROPIC_API_KEY: ${{ secrets.CI_ANTHROPIC_API_KEY }}
        run: |
          # TODO run e2e tests from the example folder
      - name: Upload logs on failure
        if: failure()
        uses: actions/upload-artifact@v3
        with:
          name: job-logs
          path: |
            ~/test-logs/*.log