name: Build and release

on:
  workflow_dispatch:
  push:
    branches:
      - '**'

env:
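  # Shared GraalVM native-image options used by the native-image jobs below: verbose build output,
  # plus resource-registration logging and class-initialization reporting to ease build diagnostics.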
  native_image_opts: --verbose -H:Log=registerResource:verbose -H:+PrintClassInitialization
  graal_distribution: graalvm-community
  graal_java_version: 21

jobs:
  build:
    name: Build
    runs-on: ubuntu-latest
    steps:
      - name: Check-out source code
        uses: actions/checkout@v4
      - uses: actions/setup-java@v4
        with:
          distribution: 'temurin'
          java-version: '17'
      - name: PROD - Prepare GitHub release
        id: create_prod_release
        uses: googleapis/release-please-action@v4
        if: github.ref == 'refs/heads/v2.x'
        with:
          skip-github-pull-request: true
          target-branch: v2.x
          release-type: simple
      - name: PROD - Define release info
        if: steps.create_prod_release.outputs.release_created
        run: |
          tag=${{steps.create_prod_release.outputs.tag_name}}
          version=${{steps.create_prod_release.outputs.version}}
          major=${{steps.create_prod_release.outputs.major}}
          minor=${{steps.create_prod_release.outputs.minor}}
          patch=${{steps.create_prod_release.outputs.patch}}
          echo DO_BUILD=true >> $GITHUB_ENV
          echo DO_RELEASE=true >> $GITHUB_ENV
          echo DO_PROD_RELEASE=true >> $GITHUB_ENV
          echo RELEASE_TAG=${tag} >> $GITHUB_ENV
          echo RELEASE_VERSION=${version} >> $GITHUB_ENV
          echo VERSION_MAJOR=${major} >> $GITHUB_ENV
          echo VERSION_MINOR=${minor} >> $GITHUB_ENV
          echo VERSION_PATCH=${patch} >> $GITHUB_ENV
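      # As an illustration with hypothetical values: for a release-please tag v2.5.0, the step above
      # would set RELEASE_TAG=v2.5.0, RELEASE_VERSION=2.5.0, VERSION_MAJOR=2, VERSION_MINOR=5 and VERSION_PATCH=0.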
      - name: DEV - Define release info
        if: startsWith(github.ref, 'refs/heads/') && !env.DO_PROD_RELEASE
        run: |
          branch="${GITHUB_REF#refs/heads/}"
          tag="dev_${branch//[^a-zA-Z0-9_.-]/.}" # Replace all special characters with a dot
          major="0"
          minor="$(date +'%Y%m%d')"
          patch="$(date +'%H%M%S')-$tag"
          version="${major}.${minor}.${patch}"
          echo DO_BUILD=true >> $GITHUB_ENV # We always want to do a build if we're building a branch
          echo BRANCH=${branch} >> $GITHUB_ENV
          echo RELEASE_TAG=${tag} >> $GITHUB_ENV
          echo RELEASE_VERSION=${version} >> $GITHUB_ENV
          echo VERSION_MAJOR=${major} >> $GITHUB_ENV
          echo VERSION_MINOR=${minor} >> $GITHUB_ENV
          echo VERSION_PATCH=${patch} >> $GITHUB_ENV
          if git ls-remote --exit-code origin refs/tags/${tag} >/dev/null 2>&1; then
            echo "Found tag ${tag}, development release will be published"
            echo DO_RELEASE=true >> $GITHUB_ENV
            echo DO_DEV_RELEASE=true >> $GITHUB_ENV
          else
            echo "Tag ${tag} does not exist, no development release will be published"
          fi
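      # As an illustration with a hypothetical branch and timestamp: a push to refs/heads/feature/my-fix
      # would yield RELEASE_TAG=dev_feature.my-fix and a version like 0.20240101.123000-dev_feature.my-fix.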
      - name: Build release ${{env.RELEASE_VERSION}}
        if: env.DO_BUILD
        run: ./gradlew clean build dist distThirdPartyReleaseAsset distFtest -Pversion=${{env.RELEASE_VERSION}}
      - name: Check fcli version
        if: env.DO_BUILD
        run: java -jar build/libs/fcli.jar --version | tee /dev/stderr | grep -E '[0-9]+\.[0-9]+\.[0-9]+' >/dev/null || (echo "fcli --version doesn't output proper version number"; exit 1)
      - name: Publish build artifacts
        uses: actions/upload-artifact@v4
        with:
          name: build-output
          path: build/dist/**/*
    outputs:
      do_release: ${{ env.DO_RELEASE }}
      do_prod_release: ${{ env.DO_PROD_RELEASE }}
      do_dev_release: ${{ env.DO_DEV_RELEASE }}
      release_tag: ${{ env.RELEASE_TAG }}
      release_version: ${{ env.RELEASE_VERSION }}
      version_major: ${{ env.VERSION_MAJOR }}
      version_minor: ${{ env.VERSION_MINOR }}
      version_patch: ${{ env.VERSION_PATCH }}

  native_linux:
    name: native-image-linux
    needs: build
    runs-on: ubuntu-22.04
    # env:
    #   TOOLCHAIN_BASE: /opt/musl_cc
    #   TOOLCHAIN_DIR: /opt/musl_cc/x86_64-linux-musl-native
    #   CC: /opt/musl_cc/x86_64-linux-musl-native/bin/gcc
    steps:
      - name: Check-out source code
        uses: actions/checkout@v4
      - uses: graalvm/setup-graalvm@v1
        with:
          distribution: ${{ env.graal_distribution }}
          java-version: ${{ env.graal_java_version }}
          native-image-musl: true
          github-token: ${{ secrets.GITHUB_TOKEN }}
      - uses: actions/download-artifact@v4
        with:
          path: ./artifacts
          name: build-output
      # For Linux, we build a statically linked native image, to allow for building a 'FROM scratch'
      # Docker image, and to avoid libc version issues. Since Jansi is not supported on statically
      # linked images (see https://github.com/fusesource/jansi/issues/246), we set a system property
      # to indicate that FortifyCLI shouldn't try to invoke AnsiConsole::systemInstall/Uninstall. In
      # order for FortifyCLI to be able to see this system property, we need to initialize this class
      # at build time (see https://www.graalvm.org/22.1/reference-manual/native-image/Properties/).
      # We also exclude the native Jansi library resources, as these are no longer needed.
      - name: Create native fcli
        run: native-image ${{ env.native_image_opts }} --static --libc=musl -Djansi.disable=true --initialize-at-build-time=com.fortify.cli.app.FortifyCLI -H:ExcludeResources="org/fusesource/jansi/internal/native/.*" -jar ./artifacts/release-assets/fcli.jar fcli
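      # A quick sanity check for static linking (not part of this workflow, just a sketch):
      # running `ldd ./fcli` on the result should report that it is not a dynamic executable
      # (exact wording varies by platform).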
      - name: Compress native fcli
        uses: svenstaro/upx-action@v2
        with:
          files: fcli
      - name: Basic test of native fcli
        run: ./fcli --help && ./fcli get --help
      - name: Check fcli version
        run: ./fcli --version | tee /dev/stderr | grep -E '[0-9]+\.[0-9]+\.[0-9]+' >/dev/null || (echo "fcli --version doesn't output proper version number"; exit 1)
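      # The tarball below bundles the native binary together with the fcli_completion script
      # taken from the downloaded build artifacts.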
      - name: Package native fcli
        run: tar -zcvf artifacts/release-assets/fcli-linux.tgz fcli -C ./artifacts fcli_completion
      - uses: actions/upload-artifact@v4
        with:
          path: ./artifacts/**/fcli-linux.tgz
          name: fcli-linux

  native_mac:
    name: native-image-mac
    needs: build
    runs-on: macos-latest
    steps:
      - name: Check-out source code
        uses: actions/checkout@v4
      - uses: graalvm/setup-graalvm@v1
        with:
          distribution: ${{ env.graal_distribution }}
          java-version: ${{ env.graal_java_version }}
          github-token: ${{ secrets.GITHUB_TOKEN }}
      - uses: actions/download-artifact@v4
        with:
          path: ./artifacts
          name: build-output
      # For MacOS, we build a dynamically linked image. Jansi by default provides a resource-config.json
      # file to include native libraries for all platforms; we override this to include only the MacOS
      # libraries.
      - name: Create native fcli
        run: native-image ${{ env.native_image_opts }} -march=compatibility -H:ExcludeResources="org/fusesource/jansi/internal/native/Windows/.*" -H:ExcludeResources="org/fusesource/jansi/internal/native/Linux/.*" -H:ExcludeResources="org/fusesource/jansi/internal/native/FreeBSD/.*" -jar ./artifacts/release-assets/fcli.jar fcli
      # Disabled for now, as compressed binaries crash on macOS Ventura or above
      #- name: Compress native fcli
      #  uses: svenstaro/upx-action@v2
      #  with:
      #    files: fcli
      - name: Basic test of native fcli
        run: ./fcli --help && ./fcli get --help
      - name: Package native fcli
        run: tar -zcvf ./artifacts/release-assets/fcli-mac.tgz fcli -C ./artifacts fcli_completion
      - uses: actions/upload-artifact@v4
        with:
          path: ./artifacts/**/fcli-mac.tgz
          name: fcli-mac

  native_win:
    name: native-image-win
    needs: build
    runs-on: windows-2022
    steps:
      - uses: graalvm/setup-graalvm@v1
        with:
          distribution: ${{ env.graal_distribution }}
          java-version: ${{ env.graal_java_version }}
          github-token: ${{ secrets.GITHUB_TOKEN }}
      - uses: actions/download-artifact@v4
        with:
          path: ./artifacts
          name: build-output
      # For Windows, we build a dynamically linked image. Jansi by default provides a resource-config.json
      # file to include native libraries for all platforms; we override this to include only the 64-bit
      # Windows library.
      - name: Create native fcli
        run: >-
          "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat" &&
          ${{ env.JAVA_HOME }}\bin\native-image.cmd ${{ env.native_image_opts }} -H:ExcludeResources="org/fusesource/jansi/internal/native/Mac/.*" -H:ExcludeResources="org/fusesource/jansi/internal/native/Linux/.*" -H:ExcludeResources="org/fusesource/jansi/internal/native/FreeBSD/.*" -jar .\artifacts\release-assets\fcli.jar fcli
        shell: cmd
      # We don't compress the Windows binary for now, as this is incompatible with the current Graal version.
      # See https://github.com/fortify/fcli/issues/148
      # - name: Compress native fcli
      #   uses: svenstaro/upx-action@v2
      #   with:
      #     files: fcli.exe
      - name: Basic test of native fcli
        run: |
          .\fcli.exe --help
          .\fcli.exe get --help
      - name: Package native fcli
        run: 7z a artifacts\release-assets\fcli-windows.zip fcli*.exe
      - uses: actions/upload-artifact@v4
        with:
          path: ./artifacts/**/fcli-windows.zip
          name: fcli-windows

  combine-artifacts:
    needs: [build, native_linux, native_mac, native_win]
    runs-on: ubuntu-latest
    steps:
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          path: ./artifacts
          merge-multiple: true
      - run: |
          cd ./artifacts/release-assets
          for f in *; do
            sha256sum ${f} > ${f}.sha256
          done
          for f in *; do
            openssl dgst -sha256 -passin env:SIGN_PASSPHRASE -sign <(echo "${SIGN_KEY}") -out ${f}.rsa_sha256 ${f}
          done
        env:
          SIGN_PASSPHRASE: ${{ secrets.SIGN_PASSPHRASE }}
          SIGN_KEY: ${{ secrets.SIGN_KEY }}
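      # Consumers could verify the generated checksums and signatures roughly as follows (a sketch,
      # not part of this workflow; the public key file name is just an example):
      #   sha256sum -c fcli-linux.tgz.sha256
      #   openssl dgst -sha256 -verify fcli-public-key.pem -signature fcli-linux.tgz.rsa_sha256 fcli-linux.tgz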
      - uses: actions/upload-artifact@v4
        with:
          path: ./artifacts
          name: combined-artifacts

  # For now, we only publish the fcli-scratch image, but we do build the other images just for testing
  docker-linux:
    needs: [build, native_linux]
    runs-on: ubuntu-latest
    env:
      DOCKER_SRC: fcli-other/fcli-docker/linux
    steps:
      - name: Check-out source code
        uses: actions/checkout@v4
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          path: ./artifacts
          name: fcli-linux
      - name: Build & test Linux images
        shell: bash
        run: |
          tar -zxvf ./artifacts/release-assets/fcli-linux.tgz -C ${DOCKER_SRC}
          cd ${DOCKER_SRC}
          for i in scratch alpine ubi9
          do
            docker build . --target fcli-${i} -t fcli-${i}
            mkdir ${PWD}/${i}
            docker run --rm -u $(id -u):$(id -g) -v ${PWD}/${i}:/data fcli-${i} fcli tool sc-client install
            test -f ${PWD}/${i}/fortify/tools/bin/scancentral
          done
      - name: Docker Login
        if: needs.build.outputs.do_release
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: DEV - Tag Linux images
        if: needs.build.outputs.do_dev_release
        shell: bash
        run: |
          docker tag fcli-scratch fortifydocker/fcli:${{needs.build.outputs.release_tag}}
      - name: PROD - Tag Linux images
        if: needs.build.outputs.do_prod_release
        shell: bash
        run: |
          docker tag fcli-scratch fortifydocker/fcli:latest
          docker tag fcli-scratch fortifydocker/fcli:${{needs.build.outputs.version_major}}
          docker tag fcli-scratch fortifydocker/fcli:${{needs.build.outputs.version_major}}.${{needs.build.outputs.version_minor}}
          docker tag fcli-scratch fortifydocker/fcli:${{needs.build.outputs.version_major}}.${{needs.build.outputs.version_minor}}.${{needs.build.outputs.version_patch}}
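      # For a hypothetical 2.5.0 production release, the step above would tag the image as
      # fortifydocker/fcli:latest, fortifydocker/fcli:2, fortifydocker/fcli:2.5 and fortifydocker/fcli:2.5.0.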
      # TODO Should we sign the images as well?
      - name: Publish Linux Docker images
        if: needs.build.outputs.do_release
        shell: bash
        run: |
          docker push --all-tags fortifydocker/fcli
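      # Once published, the image can be used along the lines of the test above (a sketch; pick the tag as needed):
      #   docker run --rm -u $(id -u):$(id -g) -v "${PWD}:/data" fortifydocker/fcli:latest fcli --version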

  # For now, we only publish the fcli-scratch Linux image, but we do build the Windows images just for testing
  docker-win:
    needs: [build, native_win]
    runs-on: windows-2022
    env:
      DOCKER_SRC: fcli-other/fcli-docker/windows
    steps:
      - name: Check-out source code
        uses: actions/checkout@v4
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          path: ./artifacts
          name: fcli-windows
      - name: Build & test Windows images
        shell: bash
        run: |
          unzip ./artifacts/release-assets/fcli-windows.zip -d ${DOCKER_SRC}
          cd ${DOCKER_SRC}
          for i in ltsc2022
          do
            docker build . --target fcli-${i} -t fcli-${i}
            # The following doesn't work yet, hence the echo statements instead of actually running these.
            # Likely, we need to pass Windows-style paths to the volume mappings in the docker run command.
            mkdir ${PWD}/${i}
            echo docker run --rm -v ${PWD}/${i}:/data fcli-${i} fcli tool sc-client install
            echo test -f ${PWD}/${i}/fortify/tools/bin/scancentral
          done
      - name: DEV - Tag Windows images
        if: needs.build.outputs.do_dev_release
        shell: bash
        run: |
          docker tag fcli-ltsc2022 fortifydocker/fcli:${{needs.build.outputs.release_tag}}
      - name: PROD - Tag Windows images
        if: needs.build.outputs.do_prod_release
        shell: bash
        run: |
          docker tag fcli-ltsc2022 fortifydocker/fcli:ltsc2022-latest
          docker tag fcli-ltsc2022 fortifydocker/fcli:ltsc2022-${{needs.build.outputs.version_major}}
          docker tag fcli-ltsc2022 fortifydocker/fcli:ltsc2022-${{needs.build.outputs.version_major}}.${{needs.build.outputs.version_minor}}
          docker tag fcli-ltsc2022 fortifydocker/fcli:ltsc2022-${{needs.build.outputs.version_major}}.${{needs.build.outputs.version_minor}}.${{needs.build.outputs.version_patch}}
      #- name: Docker Login
      #  if: needs.build.outputs.do_release
      #  uses: docker/login-action@v3
      #  with:
      #    username: ${{ secrets.DOCKER_USERNAME }}
      #    password: ${{ secrets.DOCKER_PASSWORD }}
      # TODO Should we sign the images as well?
      #- name: Publish Windows Docker images
      #  if: needs.build.outputs.do_release
      #  shell: bash
      #  run: |
      #    docker push --all-tags fortifydocker/fcli

  release:
    name: release
    if: needs.build.outputs.do_release
    needs: [build, native_linux, native_mac, native_win, combine-artifacts]
    runs-on: ubuntu-latest
    steps:
      - name: Check-out source code
        uses: actions/checkout@v4
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          path: ./artifacts
          name: combined-artifacts
      - name: PROD - Prepare release PR
        if: github.ref == 'refs/heads/v2.x'
        uses: googleapis/release-please-action@v4
        with:
          skip-github-release: true
          release-type: simple
          target-branch: v2.x
      - name: DEV - Prepare GitHub release
        if: needs.build.outputs.do_dev_release
        run: |
          gh release delete ${{ needs.build.outputs.release_tag }} -y || true
          gh release create ${{ needs.build.outputs.release_tag }} -p -t "Development Release - ${GITHUB_REF#refs/heads/} branch" -n 'See `Assets` section below for latest build artifacts'
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: DEV - Update ${{ needs.build.outputs.release_tag }} tag
        uses: richardsimko/update-tag@v1
        if: needs.build.outputs.do_dev_release
        with:
          tag_name: ${{ needs.build.outputs.release_tag }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Upload assets to release
        if: needs.build.outputs.do_release
        run: |
          files=$(find "./artifacts/release-assets" -type f -printf "%p ")
          gh release upload "${{ needs.build.outputs.release_tag }}" $files --clobber
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  publishPages:
    name: publishPages
    if: needs.build.outputs.do_release
    needs: [build]
    runs-on: ubuntu-latest
    steps:
      - name: Check-out existing docs from gh-pages branch
        uses: actions/checkout@v4
        with:
          ref: gh-pages
          path: docs
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          path: ./artifacts
          name: build-output
      - name: Update documentation from artifact
        run: |
          # Delete all Git-related files
          rm -rf docs/.git*
          # Extract top-level documentation resources
          # TODO Should we do this only when building a release, or also for dev_v2.x,
          # or for all (dev & release) versions like we do now?
          unzip -o artifacts/docs-gh-pages.zip -d "docs"
          # Define the output directory, based on tag/branch name
          versionDir=docs/${{ needs.build.outputs.release_tag }}
          # Delete, recreate and fill the directory for the current tag/branch name,
          # while leaving documentation for other tags/branches intact (as checked out above)
          rm -rf "${versionDir}"
          mkdir -p "${versionDir}"
          unzip artifacts/docs-jekyll.zip -d "${versionDir}"
          # Recreate version data files, which may be empty if no versions are available
          cd docs
          mkdir -p _data/versions
          touch _data/versions/release.yml
          touch _data/versions/dev.yml
          ls -d v*.*.* | sort -rV | while read line; do echo "- '$line'"; done > _data/versions/release.yml
          ls -d dev_* | sort | while read line; do echo "- '$line'"; done > _data/versions/dev.yml
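      # As an illustration with hypothetical directory names: if the docs directory contains
      # v2.5.0, v2.4.0 and dev_main, release.yml would end up with the lines "- 'v2.5.0'" and
      # "- 'v2.4.0'" (newest first), and dev.yml with "- 'dev_main'".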
      - name: Deploy documentation
        uses: peaceiris/actions-gh-pages@v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./docs
          enable_jekyll: true