# chatbot-inference-mpt-7b-chat.yml
name: Chatbot inference on mosaicml/mpt-7b-chat

on:
  workflow_call:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-inf-mpt-7b
  cancel-in-progress: true

permissions:
  contents: read
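
# Single job: run a smoke-test inference of mosaicml/mpt-7b-chat inside a
# Docker container on a self-hosted runner labeled "neural-chat-inference".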
jobs:
  inference:
    name: inference test
    runs-on: neural-chat-inference
    steps:
      - name: Checkout
        uses: actions/checkout@v4
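
      # Pull runner-local settings (for example the proxy variables referenced
      # below) from the self-hosted runner's .env file into the workflow env.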
      - name: Load environment variables
        run:
          cat ~/actions-runner/.env >> $GITHUB_ENV
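
      # Build the inference image only if no chatbotinfer-1-gha image exists
      # locally, then prune stopped containers and dangling images.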
      - name: Build Docker Image
        run:
          if [ $(docker images | grep chatbotinfer-1-gha | wc -l) == 0 ]; then
            docker build --no-cache ./ --target cpu --build-arg REPO=${{ github.server_url }}/${{ github.repository }}.git --build-arg REPO_PATH="." --build-arg http_proxy="${{ env.HTTP_PROXY_IMAGE_BUILD }}" --build-arg https_proxy="${{ env.HTTPS_PROXY_IMAGE_BUILD }}" -f intel_extension_for_transformers/neural_chat/docker/Dockerfile -t chatbotinfer-1-gha:latest && yes | docker container prune && yes | docker image prune;
          fi
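
      # Remove any leftover container from a previous run, then start a fresh
      # one, mounting the host Hugging Face cache so model weights are reused.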
      - name: Start Docker Container
        run: |
          cid=$(docker ps -q --filter "name=chatbotinfer-gha")
          if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid; fi
          docker run -tid -v /home/sdp/.cache/huggingface/hub:/root/.cache/huggingface/hub -e http_proxy="${{ env.HTTP_PROXY_CONTAINER_RUN }}" -e https_proxy="${{ env.HTTPS_PROXY_CONTAINER_RUN }}" --name="chatbotinfer-gha" --hostname="chatbotinfer-gha-container" chatbotinfer-1-gha:latest
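
      # Inside the container: install intel-extension-for-transformers from
      # source, then run a single prompt through generate.py against
      # mosaicml/mpt-7b-chat.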
      - name: Run Inference Test
        run: |
          docker exec "chatbotinfer-gha" bash -c "cd /intel-extension-for-transformers; \
            git config --global --add safe.directory '*' && \
            git submodule update --init --recursive && \
            pip uninstall intel-extension-for-transformers -y; \
            pip install -r requirements.txt; \
            python setup.py install; \
            pip install -r intel_extension_for_transformers/neural_chat/requirements.txt; \
            python workflows/chatbot/inference/generate.py --base_model_path \"mosaicml/mpt-7b-chat\" --instructions \"Transform the following sentence into one that shows contrast. The tree is rotten.\" "
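
      # Always stop and remove the container, even if the inference step failed.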
      - name: Stop Container
        if: always()
        run: |
          cid=$(docker ps -q --filter "name=chatbotinfer-gha")
          if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid; fi

      - name: Test Summary
        run: echo "Inference completed successfully"
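
# Minimal caller sketch: this workflow is reusable (triggered via
# `workflow_call`), so another workflow in the same repository can invoke it.
# The `.github/workflows/` location and the caller's `pull_request` trigger
# below are assumptions for illustration, not part of this file.
#
#   name: Chatbot inference caller
#   on:
#     pull_request:
#   jobs:
#     call-mpt-7b-chat-inference:
#       uses: ./.github/workflows/chatbot-inference-mpt-7b-chat.yml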