# benchmark-pipeline.yaml (forked from cncf-tags/green-reviews-tooling)

name: Benchmark Pipeline

on:
  workflow_dispatch:
    inputs:
      cncf_project:
        description: Project to be deployed, e.g. falco
        required: true
        type: choice
        options:
          - falco
      config:
        description: Configuration to test if the project has multiple variants (defaults to all)
        required: false
        type: string
      version:
        description: Version of the project to be tested, e.g. 0.37.0
        required: true
        type: string
      benchmark_job_url:
        description: URL of the benchmark job manifest
        required: true
        type: string
      benchmark_job_duration_mins:
        description: Duration of the benchmark job in minutes
        required: true
        type: number
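
# One way to dispatch this pipeline is via the GitHub CLI. The values below are
# illustrative only; the benchmark job URL is a placeholder, not a real manifest.
#
#   gh workflow run benchmark-pipeline.yaml \
#     -f cncf_project=falco \
#     -f version=0.37.0 \
#     -f benchmark_job_url=https://example.com/benchmark-job.yaml \
#     -f benchmark_job_duration_mins=15
#
# Runs share the "benchmark" concurrency group, so only one pipeline executes at
# a time and queued runs are not cancelled.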
concurrency:
  group: benchmark
  cancel-in-progress: false

jobs:
  print_summary:
    runs-on: ubuntu-latest
    steps:
      - name: Add to Summary
        run: |
          echo "## Workflow Input Parameters" >> $GITHUB_STEP_SUMMARY
          echo "| Parameter | Value |" >> $GITHUB_STEP_SUMMARY
          echo "| --- | --- |" >> $GITHUB_STEP_SUMMARY
          echo "| cncf_project | ${{ github.event.inputs.cncf_project }} |" >> $GITHUB_STEP_SUMMARY
          echo "| config | ${{ github.event.inputs.config }} |" >> $GITHUB_STEP_SUMMARY
          echo "| version | ${{ github.event.inputs.version }} |" >> $GITHUB_STEP_SUMMARY
          echo "| benchmark_job_url | ${{ github.event.inputs.benchmark_job_url }} |" >> $GITHUB_STEP_SUMMARY
          echo "| benchmark_job_duration_mins | ${{ github.event.inputs.benchmark_job_duration_mins }} |" >> $GITHUB_STEP_SUMMARY

  deploy:
    runs-on: ubuntu-24.04
    steps:
      - uses: actions/checkout@v4
      - uses: azure/setup-kubectl@v4
        with:
          version: v1.30.2
        id: install
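      # Write the cluster kubeconfig from the KUBECONFIG repository secret so
      # kubectl can reach the benchmark cluster.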
      - run: mkdir ~/.kube && echo "${{ secrets.KUBECONFIG }}" > ~/.kube/config
      - name: Select the manifest
        run: |
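          # Resolve the manifest path: projects/<project>/<config>.yaml when a
          # config is provided, otherwise projects/<project>/<project>.yaml.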
          MANIFEST=projects/${{ inputs.cncf_project }}
          CONFIG=${{ inputs.config }}
          if [[ -n $CONFIG ]]; then
            echo "Configuration provided"
            MANIFEST=$MANIFEST/$CONFIG.yaml
          else
            MANIFEST=$MANIFEST/${{ inputs.cncf_project }}.yaml
          fi
          if ! test -f "$MANIFEST"; then
            echo "No manifest found at $MANIFEST; check the cncf_project and config inputs."
            exit 1
          fi
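          # Render the manifest: envsubst replaces ${VERSION} references in the
          # selected template with the requested version. For example, a line
          # such as "image: falco:${VERSION}" (hypothetical) would become
          # "image: falco:0.37.0"; the exact placeholders depend on the project manifest.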
          export VERSION=${{ inputs.version }}
          envsubst < $MANIFEST > manifest.yaml
      - uses: actions/upload-artifact@v4
        with:
          name: manifest
          path: manifest.yaml
      - name: Apply the manifest
        run: |
          kubectl apply -f manifest.yaml
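          # Give the pods a moment to be created before waiting for readiness.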
          sleep 20
          kubectl wait pod \
            --all \
            --for=condition=Ready \
            --namespace=benchmark

  benchmark-job:
    runs-on: ubuntu-24.04
    needs: deploy
    steps:
      - uses: actions/checkout@v4
      - uses: azure/setup-kubectl@v4
        with:
          version: v1.30.2
        id: install
      - run: mkdir ~/.kube && echo "${{ secrets.KUBECONFIG }}" > ~/.kube/config
      - name: Run the benchmark job
        run: |
          kubectl apply -f ${{ inputs.benchmark_job_url }}
          sleep 20
          kubectl wait pod \
            --all \
            --for=condition=Ready \
            --namespace=falco # TODO: Revert this to "benchmark" after merging https://github.com/falcosecurity/cncf-green-review-testing/pull/22
      - name: Wait for the benchmark job to complete
        run: |
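          # sleep interprets the "m" suffix as minutes, so this blocks for the
          # requested benchmark duration.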
          sleep ${{ inputs.benchmark_job_duration_mins }}m
      - name: Delete the benchmark job
        run: |
          kubectl delete -f ${{ inputs.benchmark_job_url }} --wait
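
  # Clean up the deployed project manifest even if the benchmark job failed;
  # "if: always()" makes this job run regardless of the previous job's result.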
  delete:
    runs-on: ubuntu-24.04
    needs: benchmark-job
    if: ${{ always() }}
    steps:
      - uses: actions/checkout@v4
      - uses: azure/setup-kubectl@v4
        with:
          version: v1.30.2
        id: install
      - run: mkdir ~/.kube && echo "${{ secrets.KUBECONFIG }}" > ~/.kube/config
      - uses: actions/download-artifact@v4
        with:
          name: manifest
      - run: kubectl delete -f manifest.yaml --wait