From 9a26366605160ce8b3aecf442b32280e3af1fb43 Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Thu, 12 Sep 2024 10:34:13 +0000 Subject: [PATCH 01/38] fix(pipx-package): fix pipx inject command --- src/pipx-package/devcontainer-feature.json | 2 +- src/pipx-package/install.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pipx-package/devcontainer-feature.json b/src/pipx-package/devcontainer-feature.json index 88248bc24..c8e1d7a8e 100644 --- a/src/pipx-package/devcontainer-feature.json +++ b/src/pipx-package/devcontainer-feature.json @@ -1,7 +1,7 @@ { "name": "Pipx package", "id": "pipx-package", - "version": "1.1.7", + "version": "1.1.8", "description": "Installs a pipx package.", "documentationURL": "http://github.com/devcontainers-contrib/features/tree/main/src/pipx-package", "installsAfter": [ diff --git a/src/pipx-package/install.sh b/src/pipx-package/install.sh index f2b9c28dd..f757fd121 100755 --- a/src/pipx-package/install.sh +++ b/src/pipx-package/install.sh @@ -149,7 +149,7 @@ install_via_pipx() { injections_array_length="${#injections_array[@]}" for ((i = 0; i < ${injections_array_length}; i++)); do - ${pipx_bin} inject "$PACKAGE" --pip-args '--no-cache-dir --force-reinstall' -f "${injections_array[$i]}" + ${pipx_bin} inject --pip-args '--no-cache-dir --force-reinstall' -f "$PACKAGE" "${injections_array[$i]}" done # cleaning pipx to save disk space From 14df380d5bbaa8125e12b75f358e2a779f3ef677 Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Thu, 12 Sep 2024 10:54:37 +0000 Subject: [PATCH 02/38] chore(devcontainer): stop and remove explainshell if it's running --- .devcontainer/setup.sh | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.devcontainer/setup.sh b/.devcontainer/setup.sh index 82628e28c..d1c809ff4 100755 --- a/.devcontainer/setup.sh +++ b/.devcontainer/setup.sh @@ -4,5 +4,14 @@ npm install -g @devcontainers/cli pipx install shfmt-py +# Check if the explainshell container is running +if [ "$(docker ps -q -f name=explainshell)" ]; then + # Stop the running container + docker stop explainshell + # Remove the container + docker rm explainshell +fi + +# Run a new explainshell container # this will add hover annotations in shell script files, assuming mads-hartmann.bash-ide-vscod is installed docker container run --name explainshell --restart always -p 5000:5000 -d spaceinvaderone/explainshell From e1f135bd94c71d240e4e731819b45f5132b37b7b Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Thu, 12 Sep 2024 10:55:33 +0000 Subject: [PATCH 03/38] chore(devcontainer): bump versions and format --- .devcontainer/devcontainer.json | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 4ff14364a..8f73eea86 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -3,21 +3,20 @@ "customizations": { "vscode": { "extensions": [ - "mads-hartmann.bash-ide-vscode", - "ms-python.python", - "ms-python.vscode-pylance", - "DavidAnson.vscode-markdownlint" - ] + "mads-hartmann.bash-ide-vscode", + "ms-python.python", + "ms-python.vscode-pylance", + "DavidAnson.vscode-markdownlint" + ] } }, "features": { - "ghcr.io/devcontainers/features/python:1.1.0": {}, - "ghcr.io/devcontainers/features/node:1.3.0": {}, - "ghcr.io/devcontainers/features/github-cli:1.0.10": {}, - 
"ghcr.io/devcontainers/features/docker-in-docker:2.5.0": {}, - "ghcr.io/devcontainers/features/common-utils:2.1.2": {}, + "ghcr.io/devcontainers/features/python:1.6.4": {}, + "ghcr.io/devcontainers/features/node:1.6.0": {}, + "ghcr.io/devcontainers/features/github-cli:1.0.13": {}, + "ghcr.io/devcontainers/features/docker-in-docker:2.11.0": {}, + "ghcr.io/devcontainers/features/common-utils:2.5.1": {}, "ghcr.io/lukewiwa/features/shellcheck:0.2.3": {} - }, "postCreateCommand": "/bin/bash -ex ./.devcontainer/setup.sh > postCreateCommand.log" } \ No newline at end of file From 2b8fb38d9b53c6cff8ff70bc04ea529825b70fcc Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Fri, 13 Sep 2024 07:09:09 +0000 Subject: [PATCH 04/38] fix(ci): set features to test for scheduled run --- .github/workflows/test.yaml | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index c276e7a19..39beabd23 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -63,26 +63,28 @@ jobs: - name: "resolving features to test" id: resolve_features run: | - if [ ${{ github.event_name }} == 'pull_request' ]; then + if [ ${{ github.event_name }} == 'pull_request' ]; then echo 'features_to_test=${{ needs.find-features.outputs.changed-features }}' >> $GITHUB_OUTPUT - elif [ ${{ github.event_name }} == 'push' ]; then + elif [ ${{ github.event_name }} == 'push' ]; then echo 'features_to_test=${{ needs.find-features.outputs.changed-features }}' >> $GITHUB_OUTPUT - elif [ ${{ github.event_name }} == 'workflow_dispatch' ]; then - + elif [ ${{ github.event_name }} == 'workflow_dispatch' ]; then if [ ${{ inputs.on_changes_only }} == 'true' ]; then echo 'features_to_test=${{ needs.find-features.outputs.changed-features }}' >> $GITHUB_OUTPUT else echo 'features_to_test=${{ needs.find-features.outputs.all-features }}' >> $GITHUB_OUTPUT fi - elif [ ${{ github.event_name }} == 'workflow_call' ]; then + elif [ ${{ github.event_name }} == 'workflow_call' ]; then if [ ${{ inputs.on_changes_only }} == 'true' ]; then echo 'features_to_test=${{ needs.find-features.outputs.changed-features }}' >> $GITHUB_OUTPUT else echo 'features_to_test=${{ needs.find-features.outputs.all-features }}' >> $GITHUB_OUTPUT - fi + fi + + elif [ ${{ github.event_name }} == 'schedule' ]; then + echo 'features_to_test=${{ needs.find-features.outputs.all-features }}' >> $GITHUB_OUTPUT fi - name: binning From c353ad7f785652aa0f474f1d759b3dd92914d244 Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Thu, 12 Sep 2024 14:38:21 +0000 Subject: [PATCH 05/38] fix(ansible): use devcontainers-extra/features/pipx-package:1.1.8 --- src/ansible/devcontainer-feature.json | 2 +- src/ansible/install.sh | 11 +++-------- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/src/ansible/devcontainer-feature.json b/src/ansible/devcontainer-feature.json index 134aa951a..cd3813dca 100644 --- a/src/ansible/devcontainer-feature.json +++ b/src/ansible/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/ansible/install.sh b/src/ansible/install.sh index 304632b86..3076d9b0d 100755 --- a/src/ansible/install.sh +++ b/src/ansible/install.sh @@ -1,23 +1,18 @@ - set -e . 
./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" - $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='ansible-core' --option injections='ansible' --option version="$VERSION" - - echo 'Done!' - From a72b404dddfd0b3a695d8a5fd3948511b4833ff4 Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Fri, 13 Sep 2024 06:58:23 +0000 Subject: [PATCH 06/38] fix: swich to pipx-package-v1.1.8 --- src/aws-eb-cli/devcontainer-feature.json | 2 +- src/aws-eb-cli/install.sh | 2 +- src/bandit/devcontainer-feature.json | 2 +- src/bandit/install.sh | 2 +- src/bikeshed/devcontainer-feature.json | 2 +- src/bikeshed/install.sh | 2 +- src/black/devcontainer-feature.json | 2 +- src/black/install.sh | 2 +- src/brownie/devcontainer-feature.json | 2 +- src/brownie/install.sh | 2 +- src/buku/devcontainer-feature.json | 2 +- src/buku/install.sh | 2 +- src/checkov/devcontainer-feature.json | 2 +- src/checkov/install.sh | 2 +- src/cloudinary-cli/devcontainer-feature.json | 2 +- src/cloudinary-cli/install.sh | 2 +- src/cookiecutter/devcontainer-feature.json | 2 +- src/cookiecutter/install.sh | 2 +- src/copier/devcontainer-feature.json | 2 +- src/copier/install.sh | 2 +- src/coverage-py/devcontainer-feature.json | 2 +- src/coverage-py/install.sh | 2 +- src/cve-bin-tool/devcontainer-feature.json | 2 +- src/cve-bin-tool/install.sh | 2 +- src/cyclonedx-python/devcontainer-feature.json | 2 +- src/cyclonedx-python/install.sh | 2 +- src/datasette/devcontainer-feature.json | 2 +- src/datasette/install.sh | 2 +- src/dbt-coverage/devcontainer-feature.json | 2 +- src/dbt-coverage/install.sh | 2 +- src/flake8/devcontainer-feature.json | 2 +- src/flake8/install.sh | 2 +- src/flit/devcontainer-feature.json | 2 +- src/flit/install.sh | 2 +- src/gdbgui/devcontainer-feature.json | 2 +- src/gdbgui/install.sh | 2 +- src/glances/devcontainer-feature.json | 2 +- src/glances/install.sh | 2 +- src/hatch/devcontainer-feature.json | 2 +- src/hatch/install.sh | 2 +- src/invoke/devcontainer-feature.json | 2 +- src/invoke/install.sh | 2 +- src/isort/devcontainer-feature.json | 2 +- src/isort/install.sh | 2 +- src/jake/devcontainer-feature.json | 2 +- src/jake/install.sh | 2 +- src/jrnl/devcontainer-feature.json | 2 +- src/jrnl/install.sh | 2 +- src/keepercommander/devcontainer-feature.json | 2 +- src/keepercommander/install.sh | 2 +- src/lektor/devcontainer-feature.json | 2 +- src/lektor/install.sh | 2 +- src/linode-cli/devcontainer-feature.json | 2 +- src/linode-cli/install.sh | 2 +- src/localstack/devcontainer-feature.json | 2 +- src/localstack/install.sh | 2 +- src/mackup/devcontainer-feature.json | 2 +- src/mackup/install.sh | 2 +- src/meltano/devcontainer-feature.json | 2 +- src/meltano/install.sh | 2 +- src/mitmproxy/devcontainer-feature.json | 2 +- src/mitmproxy/install.sh | 2 +- 
src/mkdocs/devcontainer-feature.json | 2 +- src/mkdocs/install.sh | 2 +- src/mypy/devcontainer-feature.json | 2 +- src/mypy/install.sh | 2 +- src/nox/devcontainer-feature.json | 2 +- src/nox/install.sh | 2 +- src/pdm/devcontainer-feature.json | 2 +- src/pdm/install.sh | 2 +- src/pip-audit/devcontainer-feature.json | 2 +- src/pip-audit/install.sh | 2 +- src/pipenv/devcontainer-feature.json | 2 +- src/pipenv/install.sh | 2 +- src/pipx-package/README.md | 2 +- src/poetry/devcontainer-feature.json | 2 +- src/poetry/install.sh | 2 +- src/pre-commit/devcontainer-feature.json | 2 +- src/pre-commit/install.sh | 2 +- src/pyinfra/devcontainer-feature.json | 2 +- src/pyinfra/install.sh | 2 +- src/pylint/devcontainer-feature.json | 2 +- src/pylint/install.sh | 2 +- src/pyoxidizer/devcontainer-feature.json | 2 +- src/pyoxidizer/install.sh | 2 +- src/pyscaffold/devcontainer-feature.json | 2 +- src/pyscaffold/install.sh | 2 +- src/qrcode/devcontainer-feature.json | 2 +- src/qrcode/install.sh | 2 +- src/ruff/devcontainer-feature.json | 2 +- src/ruff/install.sh | 2 +- src/scancode-toolkit/devcontainer-feature.json | 2 +- src/scancode-toolkit/install.sh | 2 +- src/sigstore-python/devcontainer-feature.json | 2 +- src/sigstore-python/install.sh | 2 +- src/sqlfluff/devcontainer-feature.json | 2 +- src/sqlfluff/install.sh | 2 +- src/sqlfmt/devcontainer-feature.json | 2 +- src/sqlfmt/install.sh | 2 +- src/tox/devcontainer-feature.json | 2 +- src/tox/install.sh | 2 +- src/twine/devcontainer-feature.json | 2 +- src/twine/install.sh | 2 +- src/ufmt/devcontainer-feature.json | 2 +- src/ufmt/install.sh | 2 +- src/vulture/devcontainer-feature.json | 2 +- src/vulture/install.sh | 2 +- src/xonsh/devcontainer-feature.json | 2 +- src/xonsh/install.sh | 2 +- src/yamllint/devcontainer-feature.json | 2 +- src/yamllint/install.sh | 2 +- src/yapf/devcontainer-feature.json | 2 +- src/yapf/install.sh | 2 +- src/youtube-dl/devcontainer-feature.json | 2 +- src/youtube-dl/install.sh | 2 +- src/yt-dlp/devcontainer-feature.json | 2 +- src/yt-dlp/install.sh | 2 +- 117 files changed, 117 insertions(+), 117 deletions(-) diff --git a/src/aws-eb-cli/devcontainer-feature.json b/src/aws-eb-cli/devcontainer-feature.json index 6b4399c0d..6d5a1bef7 100644 --- a/src/aws-eb-cli/devcontainer-feature.json +++ b/src/aws-eb-cli/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/aws-eb-cli/install.sh b/src/aws-eb-cli/install.sh index fbf408226..6869eb6f9 100755 --- a/src/aws-eb-cli/install.sh +++ b/src/aws-eb-cli/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='awsebcli' --option version="$VERSION" diff --git a/src/bandit/devcontainer-feature.json b/src/bandit/devcontainer-feature.json index 24dfc3afd..2d7de4ccb 100644 --- a/src/bandit/devcontainer-feature.json +++ b/src/bandit/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/bandit/install.sh b/src/bandit/install.sh index bb8096acb..0be57149e 100755 
--- a/src/bandit/install.sh +++ b/src/bandit/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='bandit' --option version="$VERSION" diff --git a/src/bikeshed/devcontainer-feature.json b/src/bikeshed/devcontainer-feature.json index 60278830e..f3f29a827 100644 --- a/src/bikeshed/devcontainer-feature.json +++ b/src/bikeshed/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/bikeshed/install.sh b/src/bikeshed/install.sh index b3d8a1e7d..79b8c0350 100755 --- a/src/bikeshed/install.sh +++ b/src/bikeshed/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='bikeshed' --option version="$VERSION" diff --git a/src/black/devcontainer-feature.json b/src/black/devcontainer-feature.json index f47659107..1a75a3081 100644 --- a/src/black/devcontainer-feature.json +++ b/src/black/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/black/install.sh b/src/black/install.sh index c43b910d9..a2196002f 100755 --- a/src/black/install.sh +++ b/src/black/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='black' --option version="$VERSION" diff --git a/src/brownie/devcontainer-feature.json b/src/brownie/devcontainer-feature.json index 9414160d6..826289b17 100644 --- a/src/brownie/devcontainer-feature.json +++ b/src/brownie/devcontainer-feature.json @@ -23,6 +23,6 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package" + "ghcr.io/devcontainers-extra/features/pipx-package" ] } \ No newline at end of file diff --git a/src/brownie/install.sh b/src/brownie/install.sh index 6cc49a7d7..ce9a24313 100755 --- a/src/brownie/install.sh +++ b/src/brownie/install.sh @@ -30,7 +30,7 @@ $nanolayer_location \ $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='eth-brownie' --option version="$VERSION" diff --git a/src/buku/devcontainer-feature.json b/src/buku/devcontainer-feature.json index 3955257a8..f3c902e42 100644 --- a/src/buku/devcontainer-feature.json +++ b/src/buku/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/buku/install.sh b/src/buku/install.sh index 7f9791899..8ae16b7d0 100755 --- a/src/buku/install.sh +++ b/src/buku/install.sh @@ -14,7 +14,7 @@ 
ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='buku' --option version="$VERSION" diff --git a/src/checkov/devcontainer-feature.json b/src/checkov/devcontainer-feature.json index 1a972d445..6580cf858 100644 --- a/src/checkov/devcontainer-feature.json +++ b/src/checkov/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/checkov/install.sh b/src/checkov/install.sh index afcad5bc1..d8db23afb 100755 --- a/src/checkov/install.sh +++ b/src/checkov/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='checkov' --option version="$VERSION" diff --git a/src/cloudinary-cli/devcontainer-feature.json b/src/cloudinary-cli/devcontainer-feature.json index d5c70d393..d0701d901 100644 --- a/src/cloudinary-cli/devcontainer-feature.json +++ b/src/cloudinary-cli/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/cloudinary-cli/install.sh b/src/cloudinary-cli/install.sh index 5df447d5d..a874b7536 100755 --- a/src/cloudinary-cli/install.sh +++ b/src/cloudinary-cli/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='cloudinary-cli' --option version="$VERSION" diff --git a/src/cookiecutter/devcontainer-feature.json b/src/cookiecutter/devcontainer-feature.json index 36635771d..7cc475535 100644 --- a/src/cookiecutter/devcontainer-feature.json +++ b/src/cookiecutter/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/cookiecutter/install.sh b/src/cookiecutter/install.sh index 7f9c9fc74..d9b09ce78 100755 --- a/src/cookiecutter/install.sh +++ b/src/cookiecutter/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='cookiecutter' --option version="$VERSION" diff --git a/src/copier/devcontainer-feature.json b/src/copier/devcontainer-feature.json index 7e0cda52d..c4bb79bcd 100644 --- a/src/copier/devcontainer-feature.json +++ b/src/copier/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/copier/install.sh b/src/copier/install.sh 
index a4ccee8ce..dcc7e929c 100755 --- a/src/copier/install.sh +++ b/src/copier/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='copier' --option version="$VERSION" diff --git a/src/coverage-py/devcontainer-feature.json b/src/coverage-py/devcontainer-feature.json index 8cbcfbc43..d07eff497 100644 --- a/src/coverage-py/devcontainer-feature.json +++ b/src/coverage-py/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/coverage-py/install.sh b/src/coverage-py/install.sh index 68f4f01c9..878d1d54f 100755 --- a/src/coverage-py/install.sh +++ b/src/coverage-py/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='coverage' --option version="$VERSION" diff --git a/src/cve-bin-tool/devcontainer-feature.json b/src/cve-bin-tool/devcontainer-feature.json index 5839b0834..a260d7da3 100644 --- a/src/cve-bin-tool/devcontainer-feature.json +++ b/src/cve-bin-tool/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/cve-bin-tool/install.sh b/src/cve-bin-tool/install.sh index cbeb160db..b980f7a24 100755 --- a/src/cve-bin-tool/install.sh +++ b/src/cve-bin-tool/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='cve-bin-tool' --option version="$VERSION" diff --git a/src/cyclonedx-python/devcontainer-feature.json b/src/cyclonedx-python/devcontainer-feature.json index 3e20520fb..6486a5866 100644 --- a/src/cyclonedx-python/devcontainer-feature.json +++ b/src/cyclonedx-python/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/cyclonedx-python/install.sh b/src/cyclonedx-python/install.sh index ce815078d..e24366cee 100755 --- a/src/cyclonedx-python/install.sh +++ b/src/cyclonedx-python/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='cyclonedx-bom' --option version="$VERSION" diff --git a/src/datasette/devcontainer-feature.json b/src/datasette/devcontainer-feature.json index 898887417..cc291c93c 100644 --- a/src/datasette/devcontainer-feature.json +++ b/src/datasette/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + 
"ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/datasette/install.sh b/src/datasette/install.sh index 68c0457ee..9dfef8d5f 100755 --- a/src/datasette/install.sh +++ b/src/datasette/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='datasette' --option version="$VERSION" diff --git a/src/dbt-coverage/devcontainer-feature.json b/src/dbt-coverage/devcontainer-feature.json index a1912576a..dd5ab4a95 100644 --- a/src/dbt-coverage/devcontainer-feature.json +++ b/src/dbt-coverage/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/dbt-coverage/install.sh b/src/dbt-coverage/install.sh index ef9de66fe..c53add058 100755 --- a/src/dbt-coverage/install.sh +++ b/src/dbt-coverage/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='dbt-coverage' --option version="$VERSION" diff --git a/src/flake8/devcontainer-feature.json b/src/flake8/devcontainer-feature.json index 7cd1f45c4..acd1cb09d 100644 --- a/src/flake8/devcontainer-feature.json +++ b/src/flake8/devcontainer-feature.json @@ -24,7 +24,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/flake8/install.sh b/src/flake8/install.sh index c1bbe06b5..fbbfc4b4f 100755 --- a/src/flake8/install.sh +++ b/src/flake8/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='flake8' --option injections="$PLUGINS" --option version="$VERSION" diff --git a/src/flit/devcontainer-feature.json b/src/flit/devcontainer-feature.json index 56e26803f..1b5293ace 100644 --- a/src/flit/devcontainer-feature.json +++ b/src/flit/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/flit/install.sh b/src/flit/install.sh index fcb67b030..93bf633a7 100755 --- a/src/flit/install.sh +++ b/src/flit/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='flit' --option version="$VERSION" diff --git a/src/gdbgui/devcontainer-feature.json b/src/gdbgui/devcontainer-feature.json index b181a9d96..f260c3029 100644 --- a/src/gdbgui/devcontainer-feature.json +++ b/src/gdbgui/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - 
"ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/gdbgui/install.sh b/src/gdbgui/install.sh index 36fd154c6..4ad9a156a 100755 --- a/src/gdbgui/install.sh +++ b/src/gdbgui/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='gdbgui' --option version="$VERSION" diff --git a/src/glances/devcontainer-feature.json b/src/glances/devcontainer-feature.json index acd27324a..f3f1a9ec8 100644 --- a/src/glances/devcontainer-feature.json +++ b/src/glances/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/glances/install.sh b/src/glances/install.sh index ac3bcbdb1..52688e3f2 100755 --- a/src/glances/install.sh +++ b/src/glances/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='glances[action,browser,cloud,cpuinfo,docker,export,folders,gpu,graph,ip,raid,snmp,web,wifi]' --option version="$VERSION" diff --git a/src/hatch/devcontainer-feature.json b/src/hatch/devcontainer-feature.json index 4f45b83ee..5437132fd 100644 --- a/src/hatch/devcontainer-feature.json +++ b/src/hatch/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/hatch/install.sh b/src/hatch/install.sh index 17fd9594c..df6e83aa1 100755 --- a/src/hatch/install.sh +++ b/src/hatch/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='hatch' --option version="$VERSION" diff --git a/src/invoke/devcontainer-feature.json b/src/invoke/devcontainer-feature.json index cc76f0ed2..a2107f772 100644 --- a/src/invoke/devcontainer-feature.json +++ b/src/invoke/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/invoke/install.sh b/src/invoke/install.sh index dd43b5f90..2468d0828 100755 --- a/src/invoke/install.sh +++ b/src/invoke/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='invoke' --option version="$VERSION" diff --git a/src/isort/devcontainer-feature.json b/src/isort/devcontainer-feature.json index e34de27c2..a634bd50d 100644 --- a/src/isort/devcontainer-feature.json +++ 
b/src/isort/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/isort/install.sh b/src/isort/install.sh index 877d9d2c2..613ec893b 100755 --- a/src/isort/install.sh +++ b/src/isort/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='isort' --option version="$VERSION" diff --git a/src/jake/devcontainer-feature.json b/src/jake/devcontainer-feature.json index ec94976bd..01d2904c4 100644 --- a/src/jake/devcontainer-feature.json +++ b/src/jake/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/jake/install.sh b/src/jake/install.sh index 4fd643db3..20ba1e388 100755 --- a/src/jake/install.sh +++ b/src/jake/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='jake' --option version="$VERSION" diff --git a/src/jrnl/devcontainer-feature.json b/src/jrnl/devcontainer-feature.json index 87865d36c..d86448b2d 100644 --- a/src/jrnl/devcontainer-feature.json +++ b/src/jrnl/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/jrnl/install.sh b/src/jrnl/install.sh index c9555e437..3824197d4 100755 --- a/src/jrnl/install.sh +++ b/src/jrnl/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='jrnl' --option version="$VERSION" diff --git a/src/keepercommander/devcontainer-feature.json b/src/keepercommander/devcontainer-feature.json index 6da2ae75c..bac5d9de5 100644 --- a/src/keepercommander/devcontainer-feature.json +++ b/src/keepercommander/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/keepercommander/install.sh b/src/keepercommander/install.sh index 1c5220783..f2a9a3faf 100755 --- a/src/keepercommander/install.sh +++ b/src/keepercommander/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='keepercommander' --option version="$VERSION" diff --git a/src/lektor/devcontainer-feature.json b/src/lektor/devcontainer-feature.json index d58740382..2cd85a78c 100644 --- 
a/src/lektor/devcontainer-feature.json +++ b/src/lektor/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/lektor/install.sh b/src/lektor/install.sh index 25c730b9b..a0a0ee8d4 100755 --- a/src/lektor/install.sh +++ b/src/lektor/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='lektor' --option version="$VERSION" diff --git a/src/linode-cli/devcontainer-feature.json b/src/linode-cli/devcontainer-feature.json index 48e114e7a..24556bf01 100644 --- a/src/linode-cli/devcontainer-feature.json +++ b/src/linode-cli/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/linode-cli/install.sh b/src/linode-cli/install.sh index b5f3a1cc6..b8285a1db 100755 --- a/src/linode-cli/install.sh +++ b/src/linode-cli/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='linode-cli' --option version="$VERSION" diff --git a/src/localstack/devcontainer-feature.json b/src/localstack/devcontainer-feature.json index da597936b..50314651c 100644 --- a/src/localstack/devcontainer-feature.json +++ b/src/localstack/devcontainer-feature.json @@ -16,7 +16,7 @@ }, "installsAfter": [ "ghcr.io/devcontainers-contrib/features/apt-get-packages", - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ], "entrypoint": "/usr/local/share/docker-init.sh", diff --git a/src/localstack/install.sh b/src/localstack/install.sh index 3d1c382fb..b874640b1 100755 --- a/src/localstack/install.sh +++ b/src/localstack/install.sh @@ -38,7 +38,7 @@ $nanolayer_location \ $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.5" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.5" \ --option package='localstack[runtime]' --option version="$VERSION" --option includeDeps='true' --option interpreter='/usr/local/python/3.10.8/bin/python3' diff --git a/src/mackup/devcontainer-feature.json b/src/mackup/devcontainer-feature.json index f1f0c920e..a2076ff42 100644 --- a/src/mackup/devcontainer-feature.json +++ b/src/mackup/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/mackup/install.sh b/src/mackup/install.sh index 31c1a5fd1..11406d8e3 100755 --- a/src/mackup/install.sh +++ b/src/mackup/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + 
"ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='mackup' --option version="$VERSION" diff --git a/src/meltano/devcontainer-feature.json b/src/meltano/devcontainer-feature.json index d638f7345..f9b9b2421 100644 --- a/src/meltano/devcontainer-feature.json +++ b/src/meltano/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/meltano/install.sh b/src/meltano/install.sh index aa93e9b58..5faf5444d 100755 --- a/src/meltano/install.sh +++ b/src/meltano/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='meltano' --option version="$VERSION" diff --git a/src/mitmproxy/devcontainer-feature.json b/src/mitmproxy/devcontainer-feature.json index 1c784405f..fb98b276e 100644 --- a/src/mitmproxy/devcontainer-feature.json +++ b/src/mitmproxy/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/mitmproxy/install.sh b/src/mitmproxy/install.sh index ccc2d47c2..78010a704 100755 --- a/src/mitmproxy/install.sh +++ b/src/mitmproxy/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='mitmproxy' --option version="$VERSION" diff --git a/src/mkdocs/devcontainer-feature.json b/src/mkdocs/devcontainer-feature.json index 6cc278efc..a9347af95 100644 --- a/src/mkdocs/devcontainer-feature.json +++ b/src/mkdocs/devcontainer-feature.json @@ -23,7 +23,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ], "containerEnv" : {"TZ": "UTC"} diff --git a/src/mkdocs/install.sh b/src/mkdocs/install.sh index 06aac8582..f4d44a742 100755 --- a/src/mkdocs/install.sh +++ b/src/mkdocs/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='mkdocs' --option injections="$PLUGINS" --option version="$VERSION" diff --git a/src/mypy/devcontainer-feature.json b/src/mypy/devcontainer-feature.json index 80de5b65c..d887a090c 100644 --- a/src/mypy/devcontainer-feature.json +++ b/src/mypy/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/mypy/install.sh b/src/mypy/install.sh index 148ec6bc9..0da8656d3 100755 --- a/src/mypy/install.sh +++ b/src/mypy/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - 
"ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='mypy' --option version="$VERSION" diff --git a/src/nox/devcontainer-feature.json b/src/nox/devcontainer-feature.json index f449eff54..64f680fdc 100644 --- a/src/nox/devcontainer-feature.json +++ b/src/nox/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/nox/install.sh b/src/nox/install.sh index 79936d8d6..34fe7a82b 100755 --- a/src/nox/install.sh +++ b/src/nox/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='nox' --option version="$VERSION" diff --git a/src/pdm/devcontainer-feature.json b/src/pdm/devcontainer-feature.json index 1e71810f3..ca21265df 100644 --- a/src/pdm/devcontainer-feature.json +++ b/src/pdm/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/pdm/install.sh b/src/pdm/install.sh index 5c486c001..eb368be4f 100755 --- a/src/pdm/install.sh +++ b/src/pdm/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='pdm' --option version="$VERSION" diff --git a/src/pip-audit/devcontainer-feature.json b/src/pip-audit/devcontainer-feature.json index bde9de918..9042b8d9b 100644 --- a/src/pip-audit/devcontainer-feature.json +++ b/src/pip-audit/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/pip-audit/install.sh b/src/pip-audit/install.sh index 66ee781a9..09e85e89c 100755 --- a/src/pip-audit/install.sh +++ b/src/pip-audit/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='pip-audit' --option version="$VERSION" diff --git a/src/pipenv/devcontainer-feature.json b/src/pipenv/devcontainer-feature.json index 174ad20b0..75914cb14 100644 --- a/src/pipenv/devcontainer-feature.json +++ b/src/pipenv/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/pipenv/install.sh b/src/pipenv/install.sh index 867a4680d..64b026327 100755 --- a/src/pipenv/install.sh +++ b/src/pipenv/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - 
"ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='pipenv' --option version="$VERSION" diff --git a/src/pipx-package/README.md b/src/pipx-package/README.md index 234d9191f..2f1784ec5 100644 --- a/src/pipx-package/README.md +++ b/src/pipx-package/README.md @@ -7,7 +7,7 @@ Installs a pipx package. ```json "features": { - "ghcr.io/devcontainers-contrib/features/pipx-package:1": {} + "ghcr.io/devcontainers-extra/features/pipx-package:1": {} } ``` diff --git a/src/poetry/devcontainer-feature.json b/src/poetry/devcontainer-feature.json index ac6a34d8e..8c77ef101 100644 --- a/src/poetry/devcontainer-feature.json +++ b/src/poetry/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/poetry/install.sh b/src/poetry/install.sh index 31c6a6216..a66cd0b7b 100755 --- a/src/poetry/install.sh +++ b/src/poetry/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='poetry' --option version="$VERSION" diff --git a/src/pre-commit/devcontainer-feature.json b/src/pre-commit/devcontainer-feature.json index 0d824b9c6..1b984d86f 100644 --- a/src/pre-commit/devcontainer-feature.json +++ b/src/pre-commit/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/pre-commit/install.sh b/src/pre-commit/install.sh index 2910df414..13db22769 100755 --- a/src/pre-commit/install.sh +++ b/src/pre-commit/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='pre-commit' --option version="$VERSION" diff --git a/src/pyinfra/devcontainer-feature.json b/src/pyinfra/devcontainer-feature.json index b4efc7dc9..0059efd2c 100644 --- a/src/pyinfra/devcontainer-feature.json +++ b/src/pyinfra/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/pyinfra/install.sh b/src/pyinfra/install.sh index 8e8365cd1..c12dc7c51 100755 --- a/src/pyinfra/install.sh +++ b/src/pyinfra/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='pyinfra' --option version="$VERSION" diff --git a/src/pylint/devcontainer-feature.json b/src/pylint/devcontainer-feature.json index ba48e5449..ccbcc69d0 100644 --- a/src/pylint/devcontainer-feature.json +++ b/src/pylint/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + 
"ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/pylint/install.sh b/src/pylint/install.sh index 1a9390c64..03eb318df 100755 --- a/src/pylint/install.sh +++ b/src/pylint/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='pylint' --option version="$VERSION" diff --git a/src/pyoxidizer/devcontainer-feature.json b/src/pyoxidizer/devcontainer-feature.json index 659c60696..8d417a24d 100644 --- a/src/pyoxidizer/devcontainer-feature.json +++ b/src/pyoxidizer/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/pyoxidizer/install.sh b/src/pyoxidizer/install.sh index 51250bf8c..ee74f4da7 100755 --- a/src/pyoxidizer/install.sh +++ b/src/pyoxidizer/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='pyoxidizer' --option version="$VERSION" diff --git a/src/pyscaffold/devcontainer-feature.json b/src/pyscaffold/devcontainer-feature.json index 282df28a2..a8507edbe 100644 --- a/src/pyscaffold/devcontainer-feature.json +++ b/src/pyscaffold/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/pyscaffold/install.sh b/src/pyscaffold/install.sh index 3196f1ba4..398772c9c 100755 --- a/src/pyscaffold/install.sh +++ b/src/pyscaffold/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='pyscaffold[all]' --option version="$VERSION" diff --git a/src/qrcode/devcontainer-feature.json b/src/qrcode/devcontainer-feature.json index 7d7e5d329..8ce4216c7 100644 --- a/src/qrcode/devcontainer-feature.json +++ b/src/qrcode/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/qrcode/install.sh b/src/qrcode/install.sh index c841de242..f6e89e885 100755 --- a/src/qrcode/install.sh +++ b/src/qrcode/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='qrcode[pil]' --option version="$VERSION" diff --git a/src/ruff/devcontainer-feature.json b/src/ruff/devcontainer-feature.json index 9956ac204..d95142554 100644 --- a/src/ruff/devcontainer-feature.json +++ b/src/ruff/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - 
"ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/ruff/install.sh b/src/ruff/install.sh index 1e2bb2c86..1f3d07436 100755 --- a/src/ruff/install.sh +++ b/src/ruff/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='ruff' --option version="$VERSION" diff --git a/src/scancode-toolkit/devcontainer-feature.json b/src/scancode-toolkit/devcontainer-feature.json index 0724a8a0d..b12f624ee 100644 --- a/src/scancode-toolkit/devcontainer-feature.json +++ b/src/scancode-toolkit/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/scancode-toolkit/install.sh b/src/scancode-toolkit/install.sh index d71f189af..e3e3f163d 100755 --- a/src/scancode-toolkit/install.sh +++ b/src/scancode-toolkit/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='scancode-toolkit' --option version="$VERSION" diff --git a/src/sigstore-python/devcontainer-feature.json b/src/sigstore-python/devcontainer-feature.json index d77efcd41..1a813fd5c 100644 --- a/src/sigstore-python/devcontainer-feature.json +++ b/src/sigstore-python/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/sigstore-python/install.sh b/src/sigstore-python/install.sh index 5ba80c273..bbd2e1917 100755 --- a/src/sigstore-python/install.sh +++ b/src/sigstore-python/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='sigstore' --option version="$VERSION" diff --git a/src/sqlfluff/devcontainer-feature.json b/src/sqlfluff/devcontainer-feature.json index b582e6f98..f4bb7a80f 100644 --- a/src/sqlfluff/devcontainer-feature.json +++ b/src/sqlfluff/devcontainer-feature.json @@ -24,7 +24,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/sqlfluff/install.sh b/src/sqlfluff/install.sh index 8f351e5fe..fc1d1d58d 100755 --- a/src/sqlfluff/install.sh +++ b/src/sqlfluff/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.5" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='sqlfluff' --option injections="$PLUGINS" --option version="$VERSION" diff --git a/src/sqlfmt/devcontainer-feature.json 
b/src/sqlfmt/devcontainer-feature.json index 4242ad162..08ecddbf9 100644 --- a/src/sqlfmt/devcontainer-feature.json +++ b/src/sqlfmt/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/sqlfmt/install.sh b/src/sqlfmt/install.sh index 70bc42b5e..064fcd8bf 100755 --- a/src/sqlfmt/install.sh +++ b/src/sqlfmt/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='shandy-sqlfmt[jinjafmt]' --option version="$VERSION" diff --git a/src/tox/devcontainer-feature.json b/src/tox/devcontainer-feature.json index 2667fa5be..f6ba5a927 100644 --- a/src/tox/devcontainer-feature.json +++ b/src/tox/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/tox/install.sh b/src/tox/install.sh index b06a844a0..c34100f09 100755 --- a/src/tox/install.sh +++ b/src/tox/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='tox' --option version="$VERSION" diff --git a/src/twine/devcontainer-feature.json b/src/twine/devcontainer-feature.json index f52ed9bc6..3290f3c85 100644 --- a/src/twine/devcontainer-feature.json +++ b/src/twine/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/twine/install.sh b/src/twine/install.sh index d96529e0d..5bbaf0e76 100755 --- a/src/twine/install.sh +++ b/src/twine/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='twine' --option version="$VERSION" diff --git a/src/ufmt/devcontainer-feature.json b/src/ufmt/devcontainer-feature.json index e0d2a3a08..bd0fc3635 100644 --- a/src/ufmt/devcontainer-feature.json +++ b/src/ufmt/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } diff --git a/src/ufmt/install.sh b/src/ufmt/install.sh index 9ce8bd17f..3f03b38df 100755 --- a/src/ufmt/install.sh +++ b/src/ufmt/install.sh @@ -15,7 +15,7 @@ ensure_nanolayer nanolayer_location "v0.4.45" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.6" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.6" \ --option package='ufmt' --option version="$VERSION" diff --git a/src/vulture/devcontainer-feature.json b/src/vulture/devcontainer-feature.json index 166d1c11d..4e632af76 100644 --- 
a/src/vulture/devcontainer-feature.json +++ b/src/vulture/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/vulture/install.sh b/src/vulture/install.sh index fcd6951a8..a84afc9bd 100755 --- a/src/vulture/install.sh +++ b/src/vulture/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='vulture' --option version="$VERSION" diff --git a/src/xonsh/devcontainer-feature.json b/src/xonsh/devcontainer-feature.json index f7ea6117a..3eac1298c 100644 --- a/src/xonsh/devcontainer-feature.json +++ b/src/xonsh/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/xonsh/install.sh b/src/xonsh/install.sh index 86a1e904f..732f6fe77 100755 --- a/src/xonsh/install.sh +++ b/src/xonsh/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='xonsh[all]' --option version="$VERSION" diff --git a/src/yamllint/devcontainer-feature.json b/src/yamllint/devcontainer-feature.json index 1f07d3753..a883b997a 100644 --- a/src/yamllint/devcontainer-feature.json +++ b/src/yamllint/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/yamllint/install.sh b/src/yamllint/install.sh index 3b1b3af3c..484aa3a67 100755 --- a/src/yamllint/install.sh +++ b/src/yamllint/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='yamllint' --option version="$VERSION" diff --git a/src/yapf/devcontainer-feature.json b/src/yapf/devcontainer-feature.json index 4c9d6e6b7..3c413d4b2 100644 --- a/src/yapf/devcontainer-feature.json +++ b/src/yapf/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/yapf/install.sh b/src/yapf/install.sh index b38f4a770..bc683713b 100755 --- a/src/yapf/install.sh +++ b/src/yapf/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='yapf' --option version="$VERSION" diff --git a/src/youtube-dl/devcontainer-feature.json b/src/youtube-dl/devcontainer-feature.json index 07c47b60e..bc4435acd 100644 --- 
a/src/youtube-dl/devcontainer-feature.json +++ b/src/youtube-dl/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/youtube-dl/install.sh b/src/youtube-dl/install.sh index a5c9e821d..7c53a1e9a 100755 --- a/src/youtube-dl/install.sh +++ b/src/youtube-dl/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='youtube-dl' --option version="$VERSION" diff --git a/src/yt-dlp/devcontainer-feature.json b/src/yt-dlp/devcontainer-feature.json index 0c8c9759a..7e0be8620 100644 --- a/src/yt-dlp/devcontainer-feature.json +++ b/src/yt-dlp/devcontainer-feature.json @@ -15,7 +15,7 @@ } }, "installsAfter": [ - "ghcr.io/devcontainers-contrib/features/pipx-package", + "ghcr.io/devcontainers-extra/features/pipx-package", "ghcr.io/devcontainers/features/python" ] } \ No newline at end of file diff --git a/src/yt-dlp/install.sh b/src/yt-dlp/install.sh index e1ea4d8c1..58ea66a6b 100755 --- a/src/yt-dlp/install.sh +++ b/src/yt-dlp/install.sh @@ -14,7 +14,7 @@ ensure_nanolayer nanolayer_location "v0.5.0" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/pipx-package:1.1.7" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='yt-dlp' --option version="$VERSION" From 5146ec3772c822d2dda51a06270ddb8035c53db6 Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Fri, 13 Sep 2024 07:35:35 +0000 Subject: [PATCH 07/38] fix(brownie): remove redundant installation steps --- src/brownie/README.md | 0 src/brownie/devcontainer-feature.json | 0 src/brownie/install.sh | 25 ++----------------------- src/brownie/library_scripts.sh | 0 4 files changed, 2 insertions(+), 23 deletions(-) mode change 100644 => 100755 src/brownie/README.md mode change 100644 => 100755 src/brownie/devcontainer-feature.json mode change 100644 => 100755 src/brownie/library_scripts.sh diff --git a/src/brownie/README.md b/src/brownie/README.md old mode 100644 new mode 100755 diff --git a/src/brownie/devcontainer-feature.json b/src/brownie/devcontainer-feature.json old mode 100644 new mode 100755 diff --git a/src/brownie/install.sh b/src/brownie/install.sh index ce9a24313..19dd94f0c 100755 --- a/src/brownie/install.sh +++ b/src/brownie/install.sh @@ -1,39 +1,18 @@ - set -e . 
./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" - -$nanolayer_location \ - install \ - devcontainer-feature \ - "ghcr.io/devcontainers/features/python:1.0.18" \ - --option installTools='false' --option version='os-provided' - - - -$nanolayer_location \ - install \ - devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/bash-command:1.0.0" \ - --option command='pip3 install packaging==21.3' - - - $nanolayer_location \ install \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='eth-brownie' --option version="$VERSION" - - echo 'Done!' - diff --git a/src/brownie/library_scripts.sh b/src/brownie/library_scripts.sh old mode 100644 new mode 100755 From 349e0f87fb429b6a51b3a42fb5a7e71b75b1bf28 Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Fri, 13 Sep 2024 08:22:11 +0000 Subject: [PATCH 08/38] feat: add justfile with test command --- .devcontainer/devcontainer.json | 3 ++- justfile | 2 ++ 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 justfile diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 8f73eea86..03055d020 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -16,7 +16,8 @@ "ghcr.io/devcontainers/features/github-cli:1.0.13": {}, "ghcr.io/devcontainers/features/docker-in-docker:2.11.0": {}, "ghcr.io/devcontainers/features/common-utils:2.5.1": {}, - "ghcr.io/lukewiwa/features/shellcheck:0.2.3": {} + "ghcr.io/lukewiwa/features/shellcheck:0.2.3": {}, + "ghcr.io/guiyomh/features/just:0": {} }, "postCreateCommand": "/bin/bash -ex ./.devcontainer/setup.sh > postCreateCommand.log" } \ No newline at end of file diff --git a/justfile b/justfile new file mode 100644 index 000000000..12efc8465 --- /dev/null +++ b/justfile @@ -0,0 +1,2 @@ +test feature-name: + devcontainer features test -f {{feature-name}} --skip-autogenerated \ No newline at end of file From 6c1b531d52ec6c3c95cc0cf30c308d5d436eeef9 Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Fri, 13 Sep 2024 08:37:25 +0000 Subject: [PATCH 09/38] fix(ufmt): bump pipx-package version --- src/ufmt/install.sh | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/src/ufmt/install.sh b/src/ufmt/install.sh index 3f03b38df..0c71c3eb8 100755 --- a/src/ufmt/install.sh +++ b/src/ufmt/install.sh @@ -6,19 +6,15 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted 
at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" - $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-extra/features/pipx-package:1.1.6" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='ufmt' --option version="$VERSION" - - echo 'Done!' - From a55577b3ec167ed25e5205536b7480fbf4d27a15 Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Fri, 13 Sep 2024 09:36:22 +0000 Subject: [PATCH 10/38] fix(localstack): add system deps and bump version of pipx-package --- src/localstack/install.sh | 23 +++++------------------ 1 file changed, 5 insertions(+), 18 deletions(-) diff --git a/src/localstack/install.sh b/src/localstack/install.sh index b874640b1..8210d5bac 100755 --- a/src/localstack/install.sh +++ b/src/localstack/install.sh @@ -1,55 +1,42 @@ - -set -e +set -xe . ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" - $nanolayer_location \ install \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/apt-get-packages:1.0.4" \ - --option packages='build-essential,libsasl2-dev,g++' - - + --option packages='build-essential,libsasl2-dev,g++,qemu-system,libvirt-daemon-system,libvirt-dev' $nanolayer_location \ install \ devcontainer-feature \ "ghcr.io/devcontainers/features/docker-in-docker:2.1.0" \ --option installDockerBuildx='false' - - $nanolayer_location \ install \ devcontainer-feature \ "ghcr.io/devcontainers/features/python:1.1.0" \ --option installTools='false' --option OVERRIDEDEFAULTVERSION='false' --option version='3.10.8' - - $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-extra/features/pipx-package:1.1.5" \ + "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='localstack[runtime]' --option version="$VERSION" --option includeDeps='true' --option interpreter='/usr/local/python/3.10.8/bin/python3' - - $nanolayer_location \ install \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/bash-command:1.0.0" \ --option command='mkdir -p /var/lib/localstack && chown -R $_REMOTE_USER /var/lib/localstack && chgrp -R docker /var/lib/localstack && chmod -R 775 /var/lib/localstack' - - echo 'Done!' 
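For context, the install scripts being bumped in these patches share one nanolayer-based layout; a minimal sketch of that layout, using ruff as the example package (the nanolayer version pin varies per feature), is:

set -e
. ./library_scripts.sh

# reuse an existing nanolayer install, or fetch a temporary copy that is removed afterwards
ensure_nanolayer nanolayer_location "v0.5.0"

# delegate the actual install to the renamed/updated pipx-package feature
$nanolayer_location \
    install \
    devcontainer-feature \
    "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \
    --option package='ruff' --option version="$VERSION"

echo 'Done!'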
- From 41cee279962179e861df5328d820b58dac745a63 Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Fri, 13 Sep 2024 08:22:11 +0000 Subject: [PATCH 11/38] feat: add justfile with test command --- .devcontainer/devcontainer.json | 24 ++++++++++++------------ justfile | 2 ++ 2 files changed, 14 insertions(+), 12 deletions(-) create mode 100644 justfile diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 4ff14364a..03055d020 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -3,21 +3,21 @@ "customizations": { "vscode": { "extensions": [ - "mads-hartmann.bash-ide-vscode", - "ms-python.python", - "ms-python.vscode-pylance", - "DavidAnson.vscode-markdownlint" - ] + "mads-hartmann.bash-ide-vscode", + "ms-python.python", + "ms-python.vscode-pylance", + "DavidAnson.vscode-markdownlint" + ] } }, "features": { - "ghcr.io/devcontainers/features/python:1.1.0": {}, - "ghcr.io/devcontainers/features/node:1.3.0": {}, - "ghcr.io/devcontainers/features/github-cli:1.0.10": {}, - "ghcr.io/devcontainers/features/docker-in-docker:2.5.0": {}, - "ghcr.io/devcontainers/features/common-utils:2.1.2": {}, - "ghcr.io/lukewiwa/features/shellcheck:0.2.3": {} - + "ghcr.io/devcontainers/features/python:1.6.4": {}, + "ghcr.io/devcontainers/features/node:1.6.0": {}, + "ghcr.io/devcontainers/features/github-cli:1.0.13": {}, + "ghcr.io/devcontainers/features/docker-in-docker:2.11.0": {}, + "ghcr.io/devcontainers/features/common-utils:2.5.1": {}, + "ghcr.io/lukewiwa/features/shellcheck:0.2.3": {}, + "ghcr.io/guiyomh/features/just:0": {} }, "postCreateCommand": "/bin/bash -ex ./.devcontainer/setup.sh > postCreateCommand.log" } \ No newline at end of file diff --git a/justfile b/justfile new file mode 100644 index 000000000..12efc8465 --- /dev/null +++ b/justfile @@ -0,0 +1,2 @@ +test feature-name: + devcontainer features test -f {{feature-name}} --skip-autogenerated \ No newline at end of file From 0f5ae9bf862c78d78932bcfafd26c63823ad19f4 Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Fri, 13 Sep 2024 09:47:12 +0000 Subject: [PATCH 12/38] fix(netdata): update URL for installation script --- src/netdata/install.sh | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/src/netdata/install.sh b/src/netdata/install.sh index a2e30527a..576185a49 100755 --- a/src/netdata/install.sh +++ b/src/netdata/install.sh @@ -6,18 +6,16 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" - $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/curl-apt-get:1.0.7" \ - - - -curl https://my-netdata.io/kickstart.sh > /tmp/netdata-kickstart.sh && sh /tmp/netdata-kickstart.sh --no-updates --stable-channel --disable-telemetry && rm /tmp/netdata-kickstart.sh + "ghcr.io/devcontainers-contrib/features/curl-apt-get:1.0.7" +curl https://get.netdata.cloud/kickstart.sh 
>/tmp/netdata-kickstart.sh && + sh /tmp/netdata-kickstart.sh --no-updates --stable-channel --disable-telemetry && + rm /tmp/netdata-kickstart.sh From cadfcf05e2bd958330f8f1d1f645bafc2a2772a3 Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Fri, 13 Sep 2024 10:18:50 +0000 Subject: [PATCH 13/38] fix(azure-apiops): update regex for assets --- src/azure-apiops/install.sh | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/src/azure-apiops/install.sh b/src/azure-apiops/install.sh index 87df867b1..8122e7e1d 100755 --- a/src/azure-apiops/install.sh +++ b/src/azure-apiops/install.sh @@ -1,31 +1,24 @@ - set -e . ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" - $nanolayer_location \ install \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ - --option repo='Azure/apiops' --option binaryNames='extractor' --option assetRegex='^extractor.linux-x64.exe' --option version="$VERSION" - - + --option repo='Azure/apiops' --option binaryNames='extractor' --option assetRegex='^extractor-linux-x64.zip' --option version="$VERSION" $nanolayer_location \ install \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ - --option repo='Azure/apiops' --option binaryNames='publisher' --option assetRegex='^publisher.linux-x64.exe' --option version="$VERSION" - - + --option repo='Azure/apiops' --option binaryNames='publisher' --option assetRegex='^publisher-linux-x64.zip' --option version="$VERSION" echo 'Done!' 
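Since the asset regexes above switch from the old exe-style names to the zipped linux-x64 archives, one quick way to confirm they match the current release assets is the GitHub CLI (assuming gh is installed and authenticated; the tag is omitted so the latest release is used):

# list asset names of the latest Azure/apiops release and filter with the new patterns
gh release view --repo Azure/apiops --json assets --jq '.assets[].name' | grep -E '^extractor-linux-x64.zip'
gh release view --repo Azure/apiops --json assets --jq '.assets[].name' | grep -E '^publisher-linux-x64.zip'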
- From f2a16cef4958f1418c0080724ec18c7731f5e1e8 Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Sat, 14 Sep 2024 07:56:04 +0000 Subject: [PATCH 14/38] feat: move failing feautres to archive --- {src => archive}/actions-runner-noexternals/README.md | 0 .../actions-runner-noexternals/devcontainer-feature.json | 0 {src => archive}/actions-runner-noexternals/install.sh | 0 {src => archive}/actions-runner-noexternals/library_scripts.sh | 0 {src => archive}/actions-runner-noruntime-noexternals/README.md | 0 .../devcontainer-feature.json | 0 {src => archive}/actions-runner-noruntime-noexternals/install.sh | 0 .../actions-runner-noruntime-noexternals/library_scripts.sh | 0 {src => archive}/actions-runner-noruntime/README.md | 0 .../actions-runner-noruntime/devcontainer-feature.json | 0 {src => archive}/actions-runner-noruntime/install.sh | 0 {src => archive}/actions-runner-noruntime/library_scripts.sh | 0 {src => archive}/age-keygen/README.md | 0 {src => archive}/age-keygen/devcontainer-feature.json | 0 {src => archive}/age-keygen/install.sh | 0 {src => archive}/age-keygen/library_scripts.sh | 0 {src => archive}/age/README.md | 0 {src => archive}/age/devcontainer-feature.json | 0 {src => archive}/age/install.sh | 0 {src => archive}/age/library_scripts.sh | 0 {src => archive}/btm/README.md | 0 {src => archive}/btm/devcontainer-feature.json | 0 {src => archive}/btm/install.sh | 0 {src => archive}/btm/library_scripts.sh | 0 {src => archive}/cert-manager/README.md | 0 {src => archive}/cert-manager/devcontainer-feature.json | 0 {src => archive}/cert-manager/install.sh | 0 {src => archive}/cert-manager/library_scripts.sh | 0 {src => archive}/cmctl-asdf/README.md | 0 {src => archive}/cmctl-asdf/devcontainer-feature.json | 0 {src => archive}/cmctl-asdf/install.sh | 0 {src => archive}/cmctl-asdf/library_scripts.sh | 0 {src => archive}/croc/README.md | 0 {src => archive}/croc/devcontainer-feature.json | 0 {src => archive}/croc/install.sh | 0 {src => archive}/croc/library_scripts.sh | 0 {src => archive}/cue-asdf/README.md | 0 {src => archive}/cue-asdf/devcontainer-feature.json | 0 {src => archive}/cue-asdf/install.sh | 0 {src => archive}/cue-asdf/library_scripts.sh | 0 {src => archive}/edge-impulse-cli/README.md | 0 {src => archive}/edge-impulse-cli/devcontainer-feature.json | 0 {src => archive}/edge-impulse-cli/install.sh | 0 {src => archive}/edge-impulse-cli/library_scripts.sh | 0 {src => archive}/elixir-asdf/README.md | 0 {src => archive}/elixir-asdf/devcontainer-feature.json | 0 {src => archive}/elixir-asdf/install.sh | 0 {src => archive}/elixir-asdf/library_scripts.sh | 0 {src => archive}/erlang-asdf/README.md | 0 {src => archive}/erlang-asdf/devcontainer-feature.json | 0 {src => archive}/erlang-asdf/install.sh | 0 {src => archive}/erlang-asdf/library_scripts.sh | 0 {src => archive}/ffmpeg-homebrew/README.md | 0 {src => archive}/ffmpeg-homebrew/devcontainer-feature.json | 0 {src => archive}/ffmpeg-homebrew/install.sh | 0 {src => archive}/ffmpeg-homebrew/library_scripts.sh | 0 {src => archive}/gh-release/README.md | 0 {src => archive}/gh-release/devcontainer-feature.json | 0 {src => archive}/gh-release/install.sh | 0 {src => archive}/gh-release/library_scripts.sh | 0 {src => archive}/homebrew-package/README.md | 0 {src => archive}/homebrew-package/devcontainer-feature.json | 0 {src => archive}/homebrew-package/install.sh | 0 {src => archive}/homebrew-package/library_scripts.sh | 0 {src => archive}/ko/README.md | 0 {src => 
archive}/ko/devcontainer-feature.json | 0 {src => archive}/ko/install.sh | 0 {src => archive}/ko/library_scripts.sh | 0 {src => archive}/kubescape/README.md | 0 {src => archive}/kubescape/devcontainer-feature.json | 0 {src => archive}/kubescape/install.sh | 0 {src => archive}/kubescape/library_scripts.sh | 0 {src => archive}/mage/README.md | 0 {src => archive}/mage/devcontainer-feature.json | 0 {src => archive}/mage/install.sh | 0 {src => archive}/mage/library_scripts.sh | 0 {src => archive}/meson-asdf/README.md | 0 {src => archive}/meson-asdf/devcontainer-feature.json | 0 {src => archive}/meson-asdf/install.sh | 0 {src => archive}/meson-asdf/library_scripts.sh | 0 {src => archive}/meteor-cli/README.md | 0 {src => archive}/meteor-cli/devcontainer-feature.json | 0 {src => archive}/meteor-cli/install.sh | 0 {src => archive}/meteor-cli/library_scripts.sh | 0 {src => archive}/nushell/README.md | 0 {src => archive}/nushell/devcontainer-feature.json | 0 {src => archive}/nushell/install.sh | 0 {src => archive}/nushell/library_scripts.sh | 0 {src => archive}/ory-keto/README.md | 0 {src => archive}/ory-keto/devcontainer-feature.json | 0 {src => archive}/ory-keto/install.sh | 0 {src => archive}/ory-keto/library_scripts.sh | 0 {src => archive}/pip-audit/README.md | 0 {src => archive}/pip-audit/devcontainer-feature.json | 0 {src => archive}/pip-audit/install.sh | 0 {src => archive}/pip-audit/library_scripts.sh | 0 {src => archive}/porter/README.md | 0 {src => archive}/porter/devcontainer-feature.json | 0 {src => archive}/porter/install.sh | 0 {src => archive}/porter/library_scripts.sh | 0 {src => archive}/postgres-asdf/README.md | 0 {src => archive}/postgres-asdf/devcontainer-feature.json | 0 {src => archive}/postgres-asdf/install.sh | 0 {src => archive}/postgres-asdf/library_scripts.sh | 0 {src => archive}/rabbitmq-asdf/README.md | 0 {src => archive}/rabbitmq-asdf/devcontainer-feature.json | 0 {src => archive}/rabbitmq-asdf/install.sh | 0 {src => archive}/rabbitmq-asdf/library_scripts.sh | 0 {src => archive}/scala-asdf/README.md | 0 {src => archive}/scala-asdf/devcontainer-feature.json | 0 {src => archive}/scala-asdf/install.sh | 0 {src => archive}/scala-asdf/library_scripts.sh | 0 {src => archive}/serverless/README.md | 0 {src => archive}/serverless/devcontainer-feature.json | 0 {src => archive}/serverless/install.sh | 0 {src => archive}/serverless/library_scripts.sh | 0 {src => archive}/sqlfmt/README.md | 0 {src => archive}/sqlfmt/devcontainer-feature.json | 0 {src => archive}/sqlfmt/install.sh | 0 {src => archive}/sqlfmt/library_scripts.sh | 0 {src => archive}/syncthing/README.md | 0 {src => archive}/syncthing/devcontainer-feature.json | 0 {src => archive}/syncthing/install.sh | 0 {src => archive}/syncthing/library_scripts.sh | 0 {src => archive}/trivy/README.md | 0 {src => archive}/trivy/devcontainer-feature.json | 0 {src => archive}/trivy/install.sh | 0 {src => archive}/trivy/library_scripts.sh | 0 {src => archive}/vertx-sdkman/README.md | 0 {src => archive}/vertx-sdkman/devcontainer-feature.json | 0 {src => archive}/vertx-sdkman/install.sh | 0 {src => archive}/vertx-sdkman/library_scripts.sh | 0 {src => archive}/visualvm-sdkman/README.md | 0 {src => archive}/visualvm-sdkman/devcontainer-feature.json | 0 {src => archive}/visualvm-sdkman/install.sh | 0 {src => archive}/visualvm-sdkman/library_scripts.sh | 0 {src => archive}/volta/README.md | 0 {src => archive}/volta/devcontainer-feature.json | 0 {src => archive}/volta/install.sh | 0 {src => archive}/volta/library_scripts.sh | 0 {src => 
archive}/xplr/README.md | 0 {src => archive}/xplr/devcontainer-feature.json | 0 {src => archive}/xplr/install.sh | 0 {src => archive}/xplr/library_scripts.sh | 0 144 files changed, 0 insertions(+), 0 deletions(-) rename {src => archive}/actions-runner-noexternals/README.md (100%) rename {src => archive}/actions-runner-noexternals/devcontainer-feature.json (100%) rename {src => archive}/actions-runner-noexternals/install.sh (100%) rename {src => archive}/actions-runner-noexternals/library_scripts.sh (100%) rename {src => archive}/actions-runner-noruntime-noexternals/README.md (100%) rename {src => archive}/actions-runner-noruntime-noexternals/devcontainer-feature.json (100%) rename {src => archive}/actions-runner-noruntime-noexternals/install.sh (100%) rename {src => archive}/actions-runner-noruntime-noexternals/library_scripts.sh (100%) rename {src => archive}/actions-runner-noruntime/README.md (100%) rename {src => archive}/actions-runner-noruntime/devcontainer-feature.json (100%) rename {src => archive}/actions-runner-noruntime/install.sh (100%) rename {src => archive}/actions-runner-noruntime/library_scripts.sh (100%) rename {src => archive}/age-keygen/README.md (100%) rename {src => archive}/age-keygen/devcontainer-feature.json (100%) rename {src => archive}/age-keygen/install.sh (100%) rename {src => archive}/age-keygen/library_scripts.sh (100%) rename {src => archive}/age/README.md (100%) rename {src => archive}/age/devcontainer-feature.json (100%) rename {src => archive}/age/install.sh (100%) rename {src => archive}/age/library_scripts.sh (100%) rename {src => archive}/btm/README.md (100%) rename {src => archive}/btm/devcontainer-feature.json (100%) rename {src => archive}/btm/install.sh (100%) rename {src => archive}/btm/library_scripts.sh (100%) rename {src => archive}/cert-manager/README.md (100%) rename {src => archive}/cert-manager/devcontainer-feature.json (100%) rename {src => archive}/cert-manager/install.sh (100%) rename {src => archive}/cert-manager/library_scripts.sh (100%) rename {src => archive}/cmctl-asdf/README.md (100%) rename {src => archive}/cmctl-asdf/devcontainer-feature.json (100%) rename {src => archive}/cmctl-asdf/install.sh (100%) rename {src => archive}/cmctl-asdf/library_scripts.sh (100%) rename {src => archive}/croc/README.md (100%) rename {src => archive}/croc/devcontainer-feature.json (100%) rename {src => archive}/croc/install.sh (100%) rename {src => archive}/croc/library_scripts.sh (100%) rename {src => archive}/cue-asdf/README.md (100%) rename {src => archive}/cue-asdf/devcontainer-feature.json (100%) rename {src => archive}/cue-asdf/install.sh (100%) rename {src => archive}/cue-asdf/library_scripts.sh (100%) rename {src => archive}/edge-impulse-cli/README.md (100%) rename {src => archive}/edge-impulse-cli/devcontainer-feature.json (100%) rename {src => archive}/edge-impulse-cli/install.sh (100%) rename {src => archive}/edge-impulse-cli/library_scripts.sh (100%) rename {src => archive}/elixir-asdf/README.md (100%) rename {src => archive}/elixir-asdf/devcontainer-feature.json (100%) rename {src => archive}/elixir-asdf/install.sh (100%) rename {src => archive}/elixir-asdf/library_scripts.sh (100%) rename {src => archive}/erlang-asdf/README.md (100%) rename {src => archive}/erlang-asdf/devcontainer-feature.json (100%) rename {src => archive}/erlang-asdf/install.sh (100%) rename {src => archive}/erlang-asdf/library_scripts.sh (100%) rename {src => archive}/ffmpeg-homebrew/README.md (100%) rename {src => archive}/ffmpeg-homebrew/devcontainer-feature.json 
(100%) rename {src => archive}/ffmpeg-homebrew/install.sh (100%) rename {src => archive}/ffmpeg-homebrew/library_scripts.sh (100%) rename {src => archive}/gh-release/README.md (100%) rename {src => archive}/gh-release/devcontainer-feature.json (100%) rename {src => archive}/gh-release/install.sh (100%) rename {src => archive}/gh-release/library_scripts.sh (100%) rename {src => archive}/homebrew-package/README.md (100%) rename {src => archive}/homebrew-package/devcontainer-feature.json (100%) rename {src => archive}/homebrew-package/install.sh (100%) rename {src => archive}/homebrew-package/library_scripts.sh (100%) rename {src => archive}/ko/README.md (100%) rename {src => archive}/ko/devcontainer-feature.json (100%) rename {src => archive}/ko/install.sh (100%) rename {src => archive}/ko/library_scripts.sh (100%) rename {src => archive}/kubescape/README.md (100%) rename {src => archive}/kubescape/devcontainer-feature.json (100%) rename {src => archive}/kubescape/install.sh (100%) rename {src => archive}/kubescape/library_scripts.sh (100%) rename {src => archive}/mage/README.md (100%) rename {src => archive}/mage/devcontainer-feature.json (100%) rename {src => archive}/mage/install.sh (100%) rename {src => archive}/mage/library_scripts.sh (100%) rename {src => archive}/meson-asdf/README.md (100%) rename {src => archive}/meson-asdf/devcontainer-feature.json (100%) rename {src => archive}/meson-asdf/install.sh (100%) rename {src => archive}/meson-asdf/library_scripts.sh (100%) rename {src => archive}/meteor-cli/README.md (100%) rename {src => archive}/meteor-cli/devcontainer-feature.json (100%) rename {src => archive}/meteor-cli/install.sh (100%) rename {src => archive}/meteor-cli/library_scripts.sh (100%) rename {src => archive}/nushell/README.md (100%) rename {src => archive}/nushell/devcontainer-feature.json (100%) rename {src => archive}/nushell/install.sh (100%) rename {src => archive}/nushell/library_scripts.sh (100%) rename {src => archive}/ory-keto/README.md (100%) rename {src => archive}/ory-keto/devcontainer-feature.json (100%) rename {src => archive}/ory-keto/install.sh (100%) rename {src => archive}/ory-keto/library_scripts.sh (100%) rename {src => archive}/pip-audit/README.md (100%) rename {src => archive}/pip-audit/devcontainer-feature.json (100%) rename {src => archive}/pip-audit/install.sh (100%) rename {src => archive}/pip-audit/library_scripts.sh (100%) rename {src => archive}/porter/README.md (100%) rename {src => archive}/porter/devcontainer-feature.json (100%) rename {src => archive}/porter/install.sh (100%) rename {src => archive}/porter/library_scripts.sh (100%) rename {src => archive}/postgres-asdf/README.md (100%) rename {src => archive}/postgres-asdf/devcontainer-feature.json (100%) rename {src => archive}/postgres-asdf/install.sh (100%) rename {src => archive}/postgres-asdf/library_scripts.sh (100%) rename {src => archive}/rabbitmq-asdf/README.md (100%) rename {src => archive}/rabbitmq-asdf/devcontainer-feature.json (100%) rename {src => archive}/rabbitmq-asdf/install.sh (100%) rename {src => archive}/rabbitmq-asdf/library_scripts.sh (100%) rename {src => archive}/scala-asdf/README.md (100%) rename {src => archive}/scala-asdf/devcontainer-feature.json (100%) rename {src => archive}/scala-asdf/install.sh (100%) rename {src => archive}/scala-asdf/library_scripts.sh (100%) rename {src => archive}/serverless/README.md (100%) rename {src => archive}/serverless/devcontainer-feature.json (100%) rename {src => archive}/serverless/install.sh (100%) rename {src => 
archive}/serverless/library_scripts.sh (100%) rename {src => archive}/sqlfmt/README.md (100%) rename {src => archive}/sqlfmt/devcontainer-feature.json (100%) rename {src => archive}/sqlfmt/install.sh (100%) rename {src => archive}/sqlfmt/library_scripts.sh (100%) rename {src => archive}/syncthing/README.md (100%) rename {src => archive}/syncthing/devcontainer-feature.json (100%) rename {src => archive}/syncthing/install.sh (100%) rename {src => archive}/syncthing/library_scripts.sh (100%) rename {src => archive}/trivy/README.md (100%) rename {src => archive}/trivy/devcontainer-feature.json (100%) rename {src => archive}/trivy/install.sh (100%) rename {src => archive}/trivy/library_scripts.sh (100%) rename {src => archive}/vertx-sdkman/README.md (100%) rename {src => archive}/vertx-sdkman/devcontainer-feature.json (100%) rename {src => archive}/vertx-sdkman/install.sh (100%) rename {src => archive}/vertx-sdkman/library_scripts.sh (100%) rename {src => archive}/visualvm-sdkman/README.md (100%) rename {src => archive}/visualvm-sdkman/devcontainer-feature.json (100%) rename {src => archive}/visualvm-sdkman/install.sh (100%) rename {src => archive}/visualvm-sdkman/library_scripts.sh (100%) rename {src => archive}/volta/README.md (100%) rename {src => archive}/volta/devcontainer-feature.json (100%) rename {src => archive}/volta/install.sh (100%) rename {src => archive}/volta/library_scripts.sh (100%) rename {src => archive}/xplr/README.md (100%) rename {src => archive}/xplr/devcontainer-feature.json (100%) rename {src => archive}/xplr/install.sh (100%) rename {src => archive}/xplr/library_scripts.sh (100%) diff --git a/src/actions-runner-noexternals/README.md b/archive/actions-runner-noexternals/README.md similarity index 100% rename from src/actions-runner-noexternals/README.md rename to archive/actions-runner-noexternals/README.md diff --git a/src/actions-runner-noexternals/devcontainer-feature.json b/archive/actions-runner-noexternals/devcontainer-feature.json similarity index 100% rename from src/actions-runner-noexternals/devcontainer-feature.json rename to archive/actions-runner-noexternals/devcontainer-feature.json diff --git a/src/actions-runner-noexternals/install.sh b/archive/actions-runner-noexternals/install.sh similarity index 100% rename from src/actions-runner-noexternals/install.sh rename to archive/actions-runner-noexternals/install.sh diff --git a/src/actions-runner-noexternals/library_scripts.sh b/archive/actions-runner-noexternals/library_scripts.sh similarity index 100% rename from src/actions-runner-noexternals/library_scripts.sh rename to archive/actions-runner-noexternals/library_scripts.sh diff --git a/src/actions-runner-noruntime-noexternals/README.md b/archive/actions-runner-noruntime-noexternals/README.md similarity index 100% rename from src/actions-runner-noruntime-noexternals/README.md rename to archive/actions-runner-noruntime-noexternals/README.md diff --git a/src/actions-runner-noruntime-noexternals/devcontainer-feature.json b/archive/actions-runner-noruntime-noexternals/devcontainer-feature.json similarity index 100% rename from src/actions-runner-noruntime-noexternals/devcontainer-feature.json rename to archive/actions-runner-noruntime-noexternals/devcontainer-feature.json diff --git a/src/actions-runner-noruntime-noexternals/install.sh b/archive/actions-runner-noruntime-noexternals/install.sh similarity index 100% rename from src/actions-runner-noruntime-noexternals/install.sh rename to archive/actions-runner-noruntime-noexternals/install.sh diff --git 
a/src/actions-runner-noruntime-noexternals/library_scripts.sh b/archive/actions-runner-noruntime-noexternals/library_scripts.sh similarity index 100% rename from src/actions-runner-noruntime-noexternals/library_scripts.sh rename to archive/actions-runner-noruntime-noexternals/library_scripts.sh diff --git a/src/actions-runner-noruntime/README.md b/archive/actions-runner-noruntime/README.md similarity index 100% rename from src/actions-runner-noruntime/README.md rename to archive/actions-runner-noruntime/README.md diff --git a/src/actions-runner-noruntime/devcontainer-feature.json b/archive/actions-runner-noruntime/devcontainer-feature.json similarity index 100% rename from src/actions-runner-noruntime/devcontainer-feature.json rename to archive/actions-runner-noruntime/devcontainer-feature.json diff --git a/src/actions-runner-noruntime/install.sh b/archive/actions-runner-noruntime/install.sh similarity index 100% rename from src/actions-runner-noruntime/install.sh rename to archive/actions-runner-noruntime/install.sh diff --git a/src/actions-runner-noruntime/library_scripts.sh b/archive/actions-runner-noruntime/library_scripts.sh similarity index 100% rename from src/actions-runner-noruntime/library_scripts.sh rename to archive/actions-runner-noruntime/library_scripts.sh diff --git a/src/age-keygen/README.md b/archive/age-keygen/README.md similarity index 100% rename from src/age-keygen/README.md rename to archive/age-keygen/README.md diff --git a/src/age-keygen/devcontainer-feature.json b/archive/age-keygen/devcontainer-feature.json similarity index 100% rename from src/age-keygen/devcontainer-feature.json rename to archive/age-keygen/devcontainer-feature.json diff --git a/src/age-keygen/install.sh b/archive/age-keygen/install.sh similarity index 100% rename from src/age-keygen/install.sh rename to archive/age-keygen/install.sh diff --git a/src/age-keygen/library_scripts.sh b/archive/age-keygen/library_scripts.sh similarity index 100% rename from src/age-keygen/library_scripts.sh rename to archive/age-keygen/library_scripts.sh diff --git a/src/age/README.md b/archive/age/README.md similarity index 100% rename from src/age/README.md rename to archive/age/README.md diff --git a/src/age/devcontainer-feature.json b/archive/age/devcontainer-feature.json similarity index 100% rename from src/age/devcontainer-feature.json rename to archive/age/devcontainer-feature.json diff --git a/src/age/install.sh b/archive/age/install.sh similarity index 100% rename from src/age/install.sh rename to archive/age/install.sh diff --git a/src/age/library_scripts.sh b/archive/age/library_scripts.sh similarity index 100% rename from src/age/library_scripts.sh rename to archive/age/library_scripts.sh diff --git a/src/btm/README.md b/archive/btm/README.md similarity index 100% rename from src/btm/README.md rename to archive/btm/README.md diff --git a/src/btm/devcontainer-feature.json b/archive/btm/devcontainer-feature.json similarity index 100% rename from src/btm/devcontainer-feature.json rename to archive/btm/devcontainer-feature.json diff --git a/src/btm/install.sh b/archive/btm/install.sh similarity index 100% rename from src/btm/install.sh rename to archive/btm/install.sh diff --git a/src/btm/library_scripts.sh b/archive/btm/library_scripts.sh similarity index 100% rename from src/btm/library_scripts.sh rename to archive/btm/library_scripts.sh diff --git a/src/cert-manager/README.md b/archive/cert-manager/README.md similarity index 100% rename from src/cert-manager/README.md rename to 
archive/cert-manager/README.md diff --git a/src/cert-manager/devcontainer-feature.json b/archive/cert-manager/devcontainer-feature.json similarity index 100% rename from src/cert-manager/devcontainer-feature.json rename to archive/cert-manager/devcontainer-feature.json diff --git a/src/cert-manager/install.sh b/archive/cert-manager/install.sh similarity index 100% rename from src/cert-manager/install.sh rename to archive/cert-manager/install.sh diff --git a/src/cert-manager/library_scripts.sh b/archive/cert-manager/library_scripts.sh similarity index 100% rename from src/cert-manager/library_scripts.sh rename to archive/cert-manager/library_scripts.sh diff --git a/src/cmctl-asdf/README.md b/archive/cmctl-asdf/README.md similarity index 100% rename from src/cmctl-asdf/README.md rename to archive/cmctl-asdf/README.md diff --git a/src/cmctl-asdf/devcontainer-feature.json b/archive/cmctl-asdf/devcontainer-feature.json similarity index 100% rename from src/cmctl-asdf/devcontainer-feature.json rename to archive/cmctl-asdf/devcontainer-feature.json diff --git a/src/cmctl-asdf/install.sh b/archive/cmctl-asdf/install.sh similarity index 100% rename from src/cmctl-asdf/install.sh rename to archive/cmctl-asdf/install.sh diff --git a/src/cmctl-asdf/library_scripts.sh b/archive/cmctl-asdf/library_scripts.sh similarity index 100% rename from src/cmctl-asdf/library_scripts.sh rename to archive/cmctl-asdf/library_scripts.sh diff --git a/src/croc/README.md b/archive/croc/README.md similarity index 100% rename from src/croc/README.md rename to archive/croc/README.md diff --git a/src/croc/devcontainer-feature.json b/archive/croc/devcontainer-feature.json similarity index 100% rename from src/croc/devcontainer-feature.json rename to archive/croc/devcontainer-feature.json diff --git a/src/croc/install.sh b/archive/croc/install.sh similarity index 100% rename from src/croc/install.sh rename to archive/croc/install.sh diff --git a/src/croc/library_scripts.sh b/archive/croc/library_scripts.sh similarity index 100% rename from src/croc/library_scripts.sh rename to archive/croc/library_scripts.sh diff --git a/src/cue-asdf/README.md b/archive/cue-asdf/README.md similarity index 100% rename from src/cue-asdf/README.md rename to archive/cue-asdf/README.md diff --git a/src/cue-asdf/devcontainer-feature.json b/archive/cue-asdf/devcontainer-feature.json similarity index 100% rename from src/cue-asdf/devcontainer-feature.json rename to archive/cue-asdf/devcontainer-feature.json diff --git a/src/cue-asdf/install.sh b/archive/cue-asdf/install.sh similarity index 100% rename from src/cue-asdf/install.sh rename to archive/cue-asdf/install.sh diff --git a/src/cue-asdf/library_scripts.sh b/archive/cue-asdf/library_scripts.sh similarity index 100% rename from src/cue-asdf/library_scripts.sh rename to archive/cue-asdf/library_scripts.sh diff --git a/src/edge-impulse-cli/README.md b/archive/edge-impulse-cli/README.md similarity index 100% rename from src/edge-impulse-cli/README.md rename to archive/edge-impulse-cli/README.md diff --git a/src/edge-impulse-cli/devcontainer-feature.json b/archive/edge-impulse-cli/devcontainer-feature.json similarity index 100% rename from src/edge-impulse-cli/devcontainer-feature.json rename to archive/edge-impulse-cli/devcontainer-feature.json diff --git a/src/edge-impulse-cli/install.sh b/archive/edge-impulse-cli/install.sh similarity index 100% rename from src/edge-impulse-cli/install.sh rename to archive/edge-impulse-cli/install.sh diff --git a/src/edge-impulse-cli/library_scripts.sh 
b/archive/edge-impulse-cli/library_scripts.sh similarity index 100% rename from src/edge-impulse-cli/library_scripts.sh rename to archive/edge-impulse-cli/library_scripts.sh diff --git a/src/elixir-asdf/README.md b/archive/elixir-asdf/README.md similarity index 100% rename from src/elixir-asdf/README.md rename to archive/elixir-asdf/README.md diff --git a/src/elixir-asdf/devcontainer-feature.json b/archive/elixir-asdf/devcontainer-feature.json similarity index 100% rename from src/elixir-asdf/devcontainer-feature.json rename to archive/elixir-asdf/devcontainer-feature.json diff --git a/src/elixir-asdf/install.sh b/archive/elixir-asdf/install.sh similarity index 100% rename from src/elixir-asdf/install.sh rename to archive/elixir-asdf/install.sh diff --git a/src/elixir-asdf/library_scripts.sh b/archive/elixir-asdf/library_scripts.sh similarity index 100% rename from src/elixir-asdf/library_scripts.sh rename to archive/elixir-asdf/library_scripts.sh diff --git a/src/erlang-asdf/README.md b/archive/erlang-asdf/README.md similarity index 100% rename from src/erlang-asdf/README.md rename to archive/erlang-asdf/README.md diff --git a/src/erlang-asdf/devcontainer-feature.json b/archive/erlang-asdf/devcontainer-feature.json similarity index 100% rename from src/erlang-asdf/devcontainer-feature.json rename to archive/erlang-asdf/devcontainer-feature.json diff --git a/src/erlang-asdf/install.sh b/archive/erlang-asdf/install.sh similarity index 100% rename from src/erlang-asdf/install.sh rename to archive/erlang-asdf/install.sh diff --git a/src/erlang-asdf/library_scripts.sh b/archive/erlang-asdf/library_scripts.sh similarity index 100% rename from src/erlang-asdf/library_scripts.sh rename to archive/erlang-asdf/library_scripts.sh diff --git a/src/ffmpeg-homebrew/README.md b/archive/ffmpeg-homebrew/README.md similarity index 100% rename from src/ffmpeg-homebrew/README.md rename to archive/ffmpeg-homebrew/README.md diff --git a/src/ffmpeg-homebrew/devcontainer-feature.json b/archive/ffmpeg-homebrew/devcontainer-feature.json similarity index 100% rename from src/ffmpeg-homebrew/devcontainer-feature.json rename to archive/ffmpeg-homebrew/devcontainer-feature.json diff --git a/src/ffmpeg-homebrew/install.sh b/archive/ffmpeg-homebrew/install.sh similarity index 100% rename from src/ffmpeg-homebrew/install.sh rename to archive/ffmpeg-homebrew/install.sh diff --git a/src/ffmpeg-homebrew/library_scripts.sh b/archive/ffmpeg-homebrew/library_scripts.sh similarity index 100% rename from src/ffmpeg-homebrew/library_scripts.sh rename to archive/ffmpeg-homebrew/library_scripts.sh diff --git a/src/gh-release/README.md b/archive/gh-release/README.md similarity index 100% rename from src/gh-release/README.md rename to archive/gh-release/README.md diff --git a/src/gh-release/devcontainer-feature.json b/archive/gh-release/devcontainer-feature.json similarity index 100% rename from src/gh-release/devcontainer-feature.json rename to archive/gh-release/devcontainer-feature.json diff --git a/src/gh-release/install.sh b/archive/gh-release/install.sh similarity index 100% rename from src/gh-release/install.sh rename to archive/gh-release/install.sh diff --git a/src/gh-release/library_scripts.sh b/archive/gh-release/library_scripts.sh similarity index 100% rename from src/gh-release/library_scripts.sh rename to archive/gh-release/library_scripts.sh diff --git a/src/homebrew-package/README.md b/archive/homebrew-package/README.md similarity index 100% rename from src/homebrew-package/README.md rename to 
archive/homebrew-package/README.md diff --git a/src/homebrew-package/devcontainer-feature.json b/archive/homebrew-package/devcontainer-feature.json similarity index 100% rename from src/homebrew-package/devcontainer-feature.json rename to archive/homebrew-package/devcontainer-feature.json diff --git a/src/homebrew-package/install.sh b/archive/homebrew-package/install.sh similarity index 100% rename from src/homebrew-package/install.sh rename to archive/homebrew-package/install.sh diff --git a/src/homebrew-package/library_scripts.sh b/archive/homebrew-package/library_scripts.sh similarity index 100% rename from src/homebrew-package/library_scripts.sh rename to archive/homebrew-package/library_scripts.sh diff --git a/src/ko/README.md b/archive/ko/README.md similarity index 100% rename from src/ko/README.md rename to archive/ko/README.md diff --git a/src/ko/devcontainer-feature.json b/archive/ko/devcontainer-feature.json similarity index 100% rename from src/ko/devcontainer-feature.json rename to archive/ko/devcontainer-feature.json diff --git a/src/ko/install.sh b/archive/ko/install.sh similarity index 100% rename from src/ko/install.sh rename to archive/ko/install.sh diff --git a/src/ko/library_scripts.sh b/archive/ko/library_scripts.sh similarity index 100% rename from src/ko/library_scripts.sh rename to archive/ko/library_scripts.sh diff --git a/src/kubescape/README.md b/archive/kubescape/README.md similarity index 100% rename from src/kubescape/README.md rename to archive/kubescape/README.md diff --git a/src/kubescape/devcontainer-feature.json b/archive/kubescape/devcontainer-feature.json similarity index 100% rename from src/kubescape/devcontainer-feature.json rename to archive/kubescape/devcontainer-feature.json diff --git a/src/kubescape/install.sh b/archive/kubescape/install.sh similarity index 100% rename from src/kubescape/install.sh rename to archive/kubescape/install.sh diff --git a/src/kubescape/library_scripts.sh b/archive/kubescape/library_scripts.sh similarity index 100% rename from src/kubescape/library_scripts.sh rename to archive/kubescape/library_scripts.sh diff --git a/src/mage/README.md b/archive/mage/README.md similarity index 100% rename from src/mage/README.md rename to archive/mage/README.md diff --git a/src/mage/devcontainer-feature.json b/archive/mage/devcontainer-feature.json similarity index 100% rename from src/mage/devcontainer-feature.json rename to archive/mage/devcontainer-feature.json diff --git a/src/mage/install.sh b/archive/mage/install.sh similarity index 100% rename from src/mage/install.sh rename to archive/mage/install.sh diff --git a/src/mage/library_scripts.sh b/archive/mage/library_scripts.sh similarity index 100% rename from src/mage/library_scripts.sh rename to archive/mage/library_scripts.sh diff --git a/src/meson-asdf/README.md b/archive/meson-asdf/README.md similarity index 100% rename from src/meson-asdf/README.md rename to archive/meson-asdf/README.md diff --git a/src/meson-asdf/devcontainer-feature.json b/archive/meson-asdf/devcontainer-feature.json similarity index 100% rename from src/meson-asdf/devcontainer-feature.json rename to archive/meson-asdf/devcontainer-feature.json diff --git a/src/meson-asdf/install.sh b/archive/meson-asdf/install.sh similarity index 100% rename from src/meson-asdf/install.sh rename to archive/meson-asdf/install.sh diff --git a/src/meson-asdf/library_scripts.sh b/archive/meson-asdf/library_scripts.sh similarity index 100% rename from src/meson-asdf/library_scripts.sh rename to 
archive/meson-asdf/library_scripts.sh diff --git a/src/meteor-cli/README.md b/archive/meteor-cli/README.md similarity index 100% rename from src/meteor-cli/README.md rename to archive/meteor-cli/README.md diff --git a/src/meteor-cli/devcontainer-feature.json b/archive/meteor-cli/devcontainer-feature.json similarity index 100% rename from src/meteor-cli/devcontainer-feature.json rename to archive/meteor-cli/devcontainer-feature.json diff --git a/src/meteor-cli/install.sh b/archive/meteor-cli/install.sh similarity index 100% rename from src/meteor-cli/install.sh rename to archive/meteor-cli/install.sh diff --git a/src/meteor-cli/library_scripts.sh b/archive/meteor-cli/library_scripts.sh similarity index 100% rename from src/meteor-cli/library_scripts.sh rename to archive/meteor-cli/library_scripts.sh diff --git a/src/nushell/README.md b/archive/nushell/README.md similarity index 100% rename from src/nushell/README.md rename to archive/nushell/README.md diff --git a/src/nushell/devcontainer-feature.json b/archive/nushell/devcontainer-feature.json similarity index 100% rename from src/nushell/devcontainer-feature.json rename to archive/nushell/devcontainer-feature.json diff --git a/src/nushell/install.sh b/archive/nushell/install.sh similarity index 100% rename from src/nushell/install.sh rename to archive/nushell/install.sh diff --git a/src/nushell/library_scripts.sh b/archive/nushell/library_scripts.sh similarity index 100% rename from src/nushell/library_scripts.sh rename to archive/nushell/library_scripts.sh diff --git a/src/ory-keto/README.md b/archive/ory-keto/README.md similarity index 100% rename from src/ory-keto/README.md rename to archive/ory-keto/README.md diff --git a/src/ory-keto/devcontainer-feature.json b/archive/ory-keto/devcontainer-feature.json similarity index 100% rename from src/ory-keto/devcontainer-feature.json rename to archive/ory-keto/devcontainer-feature.json diff --git a/src/ory-keto/install.sh b/archive/ory-keto/install.sh similarity index 100% rename from src/ory-keto/install.sh rename to archive/ory-keto/install.sh diff --git a/src/ory-keto/library_scripts.sh b/archive/ory-keto/library_scripts.sh similarity index 100% rename from src/ory-keto/library_scripts.sh rename to archive/ory-keto/library_scripts.sh diff --git a/src/pip-audit/README.md b/archive/pip-audit/README.md similarity index 100% rename from src/pip-audit/README.md rename to archive/pip-audit/README.md diff --git a/src/pip-audit/devcontainer-feature.json b/archive/pip-audit/devcontainer-feature.json similarity index 100% rename from src/pip-audit/devcontainer-feature.json rename to archive/pip-audit/devcontainer-feature.json diff --git a/src/pip-audit/install.sh b/archive/pip-audit/install.sh similarity index 100% rename from src/pip-audit/install.sh rename to archive/pip-audit/install.sh diff --git a/src/pip-audit/library_scripts.sh b/archive/pip-audit/library_scripts.sh similarity index 100% rename from src/pip-audit/library_scripts.sh rename to archive/pip-audit/library_scripts.sh diff --git a/src/porter/README.md b/archive/porter/README.md similarity index 100% rename from src/porter/README.md rename to archive/porter/README.md diff --git a/src/porter/devcontainer-feature.json b/archive/porter/devcontainer-feature.json similarity index 100% rename from src/porter/devcontainer-feature.json rename to archive/porter/devcontainer-feature.json diff --git a/src/porter/install.sh b/archive/porter/install.sh similarity index 100% rename from src/porter/install.sh rename to archive/porter/install.sh 
diff --git a/src/porter/library_scripts.sh b/archive/porter/library_scripts.sh similarity index 100% rename from src/porter/library_scripts.sh rename to archive/porter/library_scripts.sh diff --git a/src/postgres-asdf/README.md b/archive/postgres-asdf/README.md similarity index 100% rename from src/postgres-asdf/README.md rename to archive/postgres-asdf/README.md diff --git a/src/postgres-asdf/devcontainer-feature.json b/archive/postgres-asdf/devcontainer-feature.json similarity index 100% rename from src/postgres-asdf/devcontainer-feature.json rename to archive/postgres-asdf/devcontainer-feature.json diff --git a/src/postgres-asdf/install.sh b/archive/postgres-asdf/install.sh similarity index 100% rename from src/postgres-asdf/install.sh rename to archive/postgres-asdf/install.sh diff --git a/src/postgres-asdf/library_scripts.sh b/archive/postgres-asdf/library_scripts.sh similarity index 100% rename from src/postgres-asdf/library_scripts.sh rename to archive/postgres-asdf/library_scripts.sh diff --git a/src/rabbitmq-asdf/README.md b/archive/rabbitmq-asdf/README.md similarity index 100% rename from src/rabbitmq-asdf/README.md rename to archive/rabbitmq-asdf/README.md diff --git a/src/rabbitmq-asdf/devcontainer-feature.json b/archive/rabbitmq-asdf/devcontainer-feature.json similarity index 100% rename from src/rabbitmq-asdf/devcontainer-feature.json rename to archive/rabbitmq-asdf/devcontainer-feature.json diff --git a/src/rabbitmq-asdf/install.sh b/archive/rabbitmq-asdf/install.sh similarity index 100% rename from src/rabbitmq-asdf/install.sh rename to archive/rabbitmq-asdf/install.sh diff --git a/src/rabbitmq-asdf/library_scripts.sh b/archive/rabbitmq-asdf/library_scripts.sh similarity index 100% rename from src/rabbitmq-asdf/library_scripts.sh rename to archive/rabbitmq-asdf/library_scripts.sh diff --git a/src/scala-asdf/README.md b/archive/scala-asdf/README.md similarity index 100% rename from src/scala-asdf/README.md rename to archive/scala-asdf/README.md diff --git a/src/scala-asdf/devcontainer-feature.json b/archive/scala-asdf/devcontainer-feature.json similarity index 100% rename from src/scala-asdf/devcontainer-feature.json rename to archive/scala-asdf/devcontainer-feature.json diff --git a/src/scala-asdf/install.sh b/archive/scala-asdf/install.sh similarity index 100% rename from src/scala-asdf/install.sh rename to archive/scala-asdf/install.sh diff --git a/src/scala-asdf/library_scripts.sh b/archive/scala-asdf/library_scripts.sh similarity index 100% rename from src/scala-asdf/library_scripts.sh rename to archive/scala-asdf/library_scripts.sh diff --git a/src/serverless/README.md b/archive/serverless/README.md similarity index 100% rename from src/serverless/README.md rename to archive/serverless/README.md diff --git a/src/serverless/devcontainer-feature.json b/archive/serverless/devcontainer-feature.json similarity index 100% rename from src/serverless/devcontainer-feature.json rename to archive/serverless/devcontainer-feature.json diff --git a/src/serverless/install.sh b/archive/serverless/install.sh similarity index 100% rename from src/serverless/install.sh rename to archive/serverless/install.sh diff --git a/src/serverless/library_scripts.sh b/archive/serverless/library_scripts.sh similarity index 100% rename from src/serverless/library_scripts.sh rename to archive/serverless/library_scripts.sh diff --git a/src/sqlfmt/README.md b/archive/sqlfmt/README.md similarity index 100% rename from src/sqlfmt/README.md rename to archive/sqlfmt/README.md diff --git 
a/src/sqlfmt/devcontainer-feature.json b/archive/sqlfmt/devcontainer-feature.json similarity index 100% rename from src/sqlfmt/devcontainer-feature.json rename to archive/sqlfmt/devcontainer-feature.json diff --git a/src/sqlfmt/install.sh b/archive/sqlfmt/install.sh similarity index 100% rename from src/sqlfmt/install.sh rename to archive/sqlfmt/install.sh diff --git a/src/sqlfmt/library_scripts.sh b/archive/sqlfmt/library_scripts.sh similarity index 100% rename from src/sqlfmt/library_scripts.sh rename to archive/sqlfmt/library_scripts.sh diff --git a/src/syncthing/README.md b/archive/syncthing/README.md similarity index 100% rename from src/syncthing/README.md rename to archive/syncthing/README.md diff --git a/src/syncthing/devcontainer-feature.json b/archive/syncthing/devcontainer-feature.json similarity index 100% rename from src/syncthing/devcontainer-feature.json rename to archive/syncthing/devcontainer-feature.json diff --git a/src/syncthing/install.sh b/archive/syncthing/install.sh similarity index 100% rename from src/syncthing/install.sh rename to archive/syncthing/install.sh diff --git a/src/syncthing/library_scripts.sh b/archive/syncthing/library_scripts.sh similarity index 100% rename from src/syncthing/library_scripts.sh rename to archive/syncthing/library_scripts.sh diff --git a/src/trivy/README.md b/archive/trivy/README.md similarity index 100% rename from src/trivy/README.md rename to archive/trivy/README.md diff --git a/src/trivy/devcontainer-feature.json b/archive/trivy/devcontainer-feature.json similarity index 100% rename from src/trivy/devcontainer-feature.json rename to archive/trivy/devcontainer-feature.json diff --git a/src/trivy/install.sh b/archive/trivy/install.sh similarity index 100% rename from src/trivy/install.sh rename to archive/trivy/install.sh diff --git a/src/trivy/library_scripts.sh b/archive/trivy/library_scripts.sh similarity index 100% rename from src/trivy/library_scripts.sh rename to archive/trivy/library_scripts.sh diff --git a/src/vertx-sdkman/README.md b/archive/vertx-sdkman/README.md similarity index 100% rename from src/vertx-sdkman/README.md rename to archive/vertx-sdkman/README.md diff --git a/src/vertx-sdkman/devcontainer-feature.json b/archive/vertx-sdkman/devcontainer-feature.json similarity index 100% rename from src/vertx-sdkman/devcontainer-feature.json rename to archive/vertx-sdkman/devcontainer-feature.json diff --git a/src/vertx-sdkman/install.sh b/archive/vertx-sdkman/install.sh similarity index 100% rename from src/vertx-sdkman/install.sh rename to archive/vertx-sdkman/install.sh diff --git a/src/vertx-sdkman/library_scripts.sh b/archive/vertx-sdkman/library_scripts.sh similarity index 100% rename from src/vertx-sdkman/library_scripts.sh rename to archive/vertx-sdkman/library_scripts.sh diff --git a/src/visualvm-sdkman/README.md b/archive/visualvm-sdkman/README.md similarity index 100% rename from src/visualvm-sdkman/README.md rename to archive/visualvm-sdkman/README.md diff --git a/src/visualvm-sdkman/devcontainer-feature.json b/archive/visualvm-sdkman/devcontainer-feature.json similarity index 100% rename from src/visualvm-sdkman/devcontainer-feature.json rename to archive/visualvm-sdkman/devcontainer-feature.json diff --git a/src/visualvm-sdkman/install.sh b/archive/visualvm-sdkman/install.sh similarity index 100% rename from src/visualvm-sdkman/install.sh rename to archive/visualvm-sdkman/install.sh diff --git a/src/visualvm-sdkman/library_scripts.sh b/archive/visualvm-sdkman/library_scripts.sh similarity index 100% rename 
from src/visualvm-sdkman/library_scripts.sh
rename to archive/visualvm-sdkman/library_scripts.sh
diff --git a/src/volta/README.md b/archive/volta/README.md
similarity index 100%
rename from src/volta/README.md
rename to archive/volta/README.md
diff --git a/src/volta/devcontainer-feature.json b/archive/volta/devcontainer-feature.json
similarity index 100%
rename from src/volta/devcontainer-feature.json
rename to archive/volta/devcontainer-feature.json
diff --git a/src/volta/install.sh b/archive/volta/install.sh
similarity index 100%
rename from src/volta/install.sh
rename to archive/volta/install.sh
diff --git a/src/volta/library_scripts.sh b/archive/volta/library_scripts.sh
similarity index 100%
rename from src/volta/library_scripts.sh
rename to archive/volta/library_scripts.sh
diff --git a/src/xplr/README.md b/archive/xplr/README.md
similarity index 100%
rename from src/xplr/README.md
rename to archive/xplr/README.md
diff --git a/src/xplr/devcontainer-feature.json b/archive/xplr/devcontainer-feature.json
similarity index 100%
rename from src/xplr/devcontainer-feature.json
rename to archive/xplr/devcontainer-feature.json
diff --git a/src/xplr/install.sh b/archive/xplr/install.sh
similarity index 100%
rename from src/xplr/install.sh
rename to archive/xplr/install.sh
diff --git a/src/xplr/library_scripts.sh b/archive/xplr/library_scripts.sh
similarity index 100%
rename from src/xplr/library_scripts.sh
rename to archive/xplr/library_scripts.sh

From 3625bb25cf45f09aa15b937e13adbc23bca2dcf1 Mon Sep 17 00:00:00 2001
From: koralowiec <36413794+koralowiec@users.noreply.github.com>
Date: Sat, 14 Sep 2024 08:12:50 +0000
Subject: [PATCH 15/38] fix(ci): assign permissions for publish step

---
 .github/workflows/release.yaml | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 043ec90c6..c169370a0 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -18,6 +18,10 @@ jobs:
     needs: [run-tests]
     if: ${{ github.ref == 'refs/heads/main' }}
     runs-on: ubuntu-latest
+    permissions:
+      contents: write
+      pull-requests: write
+      packages: write
     steps:
       - uses: actions/checkout@v3

From 4ab36a1f420cf1bcccf77de2543d4008e7087730 Mon Sep 17 00:00:00 2001
From: koralowiec <36413794+koralowiec@users.noreply.github.com>
Date: Sat, 14 Sep 2024 09:00:19 +0000
Subject: [PATCH 16/38] feat: move failing features to archive

---
 {src => archive}/airplane-cli/README.md | 0
 {src => archive}/airplane-cli/devcontainer-feature.json | 0
 {src => archive}/airplane-cli/install.sh | 0
 {src => archive}/airplane-cli/library_scripts.sh | 0
 {src => archive}/bitwarden-cli/README.md | 0
 {src => archive}/bitwarden-cli/devcontainer-feature.json | 0
 {src => archive}/bitwarden-cli/install.sh | 0
 {src => archive}/bitwarden-cli/library_scripts.sh | 0
 {src => archive}/boundary-asdf/README.md | 0
 {src => archive}/boundary-asdf/devcontainer-feature.json | 0
 {src => archive}/boundary-asdf/install.sh | 0
 {src => archive}/boundary-asdf/library_scripts.sh | 0
 {src => archive}/codenotary-cas/README.md | 0
 {src => archive}/codenotary-cas/devcontainer-feature.json | 0
 {src => archive}/codenotary-cas/install.sh | 0
 {src => archive}/codenotary-cas/library_scripts.sh | 0
 16 files changed, 0 insertions(+), 0 deletions(-)
 rename {src => archive}/airplane-cli/README.md (100%)
 rename {src => archive}/airplane-cli/devcontainer-feature.json (100%)
 rename {src => archive}/airplane-cli/install.sh (100%)
 rename {src => archive}/airplane-cli/library_scripts.sh (100%)
rename {src => archive}/bitwarden-cli/README.md (100%) rename {src => archive}/bitwarden-cli/devcontainer-feature.json (100%) rename {src => archive}/bitwarden-cli/install.sh (100%) rename {src => archive}/bitwarden-cli/library_scripts.sh (100%) rename {src => archive}/boundary-asdf/README.md (100%) rename {src => archive}/boundary-asdf/devcontainer-feature.json (100%) rename {src => archive}/boundary-asdf/install.sh (100%) rename {src => archive}/boundary-asdf/library_scripts.sh (100%) rename {src => archive}/codenotary-cas/README.md (100%) rename {src => archive}/codenotary-cas/devcontainer-feature.json (100%) rename {src => archive}/codenotary-cas/install.sh (100%) rename {src => archive}/codenotary-cas/library_scripts.sh (100%) diff --git a/src/airplane-cli/README.md b/archive/airplane-cli/README.md similarity index 100% rename from src/airplane-cli/README.md rename to archive/airplane-cli/README.md diff --git a/src/airplane-cli/devcontainer-feature.json b/archive/airplane-cli/devcontainer-feature.json similarity index 100% rename from src/airplane-cli/devcontainer-feature.json rename to archive/airplane-cli/devcontainer-feature.json diff --git a/src/airplane-cli/install.sh b/archive/airplane-cli/install.sh similarity index 100% rename from src/airplane-cli/install.sh rename to archive/airplane-cli/install.sh diff --git a/src/airplane-cli/library_scripts.sh b/archive/airplane-cli/library_scripts.sh similarity index 100% rename from src/airplane-cli/library_scripts.sh rename to archive/airplane-cli/library_scripts.sh diff --git a/src/bitwarden-cli/README.md b/archive/bitwarden-cli/README.md similarity index 100% rename from src/bitwarden-cli/README.md rename to archive/bitwarden-cli/README.md diff --git a/src/bitwarden-cli/devcontainer-feature.json b/archive/bitwarden-cli/devcontainer-feature.json similarity index 100% rename from src/bitwarden-cli/devcontainer-feature.json rename to archive/bitwarden-cli/devcontainer-feature.json diff --git a/src/bitwarden-cli/install.sh b/archive/bitwarden-cli/install.sh similarity index 100% rename from src/bitwarden-cli/install.sh rename to archive/bitwarden-cli/install.sh diff --git a/src/bitwarden-cli/library_scripts.sh b/archive/bitwarden-cli/library_scripts.sh similarity index 100% rename from src/bitwarden-cli/library_scripts.sh rename to archive/bitwarden-cli/library_scripts.sh diff --git a/src/boundary-asdf/README.md b/archive/boundary-asdf/README.md similarity index 100% rename from src/boundary-asdf/README.md rename to archive/boundary-asdf/README.md diff --git a/src/boundary-asdf/devcontainer-feature.json b/archive/boundary-asdf/devcontainer-feature.json similarity index 100% rename from src/boundary-asdf/devcontainer-feature.json rename to archive/boundary-asdf/devcontainer-feature.json diff --git a/src/boundary-asdf/install.sh b/archive/boundary-asdf/install.sh similarity index 100% rename from src/boundary-asdf/install.sh rename to archive/boundary-asdf/install.sh diff --git a/src/boundary-asdf/library_scripts.sh b/archive/boundary-asdf/library_scripts.sh similarity index 100% rename from src/boundary-asdf/library_scripts.sh rename to archive/boundary-asdf/library_scripts.sh diff --git a/src/codenotary-cas/README.md b/archive/codenotary-cas/README.md similarity index 100% rename from src/codenotary-cas/README.md rename to archive/codenotary-cas/README.md diff --git a/src/codenotary-cas/devcontainer-feature.json b/archive/codenotary-cas/devcontainer-feature.json similarity index 100% rename from 
src/codenotary-cas/devcontainer-feature.json
rename to archive/codenotary-cas/devcontainer-feature.json
diff --git a/src/codenotary-cas/install.sh b/archive/codenotary-cas/install.sh
similarity index 100%
rename from src/codenotary-cas/install.sh
rename to archive/codenotary-cas/install.sh
diff --git a/src/codenotary-cas/library_scripts.sh b/archive/codenotary-cas/library_scripts.sh
similarity index 100%
rename from src/codenotary-cas/library_scripts.sh
rename to archive/codenotary-cas/library_scripts.sh

From fcfd18eb283173688a727982152854c00c426c7a Mon Sep 17 00:00:00 2001
From: koralowiec <36413794+koralowiec@users.noreply.github.com>
Date: Sat, 14 Sep 2024 09:08:14 +0000
Subject: [PATCH 17/38] chore: archive tests of failing packages

---
 archive/{ => src}/actions-runner-noexternals/README.md | 0
 .../actions-runner-noexternals/devcontainer-feature.json | 0
 archive/{ => src}/actions-runner-noexternals/install.sh | 0
 archive/{ => src}/actions-runner-noexternals/library_scripts.sh | 0
 archive/{ => src}/actions-runner-noruntime-noexternals/README.md | 0
 .../devcontainer-feature.json | 0
 archive/{ => src}/actions-runner-noruntime-noexternals/install.sh | 0
 .../actions-runner-noruntime-noexternals/library_scripts.sh | 0
 archive/{ => src}/actions-runner-noruntime/README.md | 0
 .../{ => src}/actions-runner-noruntime/devcontainer-feature.json | 0
 archive/{ => src}/actions-runner-noruntime/install.sh | 0
 archive/{ => src}/actions-runner-noruntime/library_scripts.sh | 0
 archive/{ => src}/age-keygen/README.md | 0
 archive/{ => src}/age-keygen/devcontainer-feature.json | 0
 archive/{ => src}/age-keygen/install.sh | 0
 archive/{ => src}/age-keygen/library_scripts.sh | 0
 archive/{ => src}/age/README.md | 0
 archive/{ => src}/age/devcontainer-feature.json | 0
 archive/{ => src}/age/install.sh | 0
 archive/{ => src}/age/library_scripts.sh | 0
 archive/{ => src}/airplane-cli/README.md | 0
 archive/{ => src}/airplane-cli/devcontainer-feature.json | 0
 archive/{ => src}/airplane-cli/install.sh | 0
 archive/{ => src}/airplane-cli/library_scripts.sh | 0
 archive/{ => src}/bitwarden-cli/README.md | 0
 archive/{ => src}/bitwarden-cli/devcontainer-feature.json | 0
 archive/{ => src}/bitwarden-cli/install.sh | 0
 archive/{ => src}/bitwarden-cli/library_scripts.sh | 0
 archive/{ => src}/boundary-asdf/README.md | 0
 archive/{ => src}/boundary-asdf/devcontainer-feature.json | 0
 archive/{ => src}/boundary-asdf/install.sh | 0
 archive/{ => src}/boundary-asdf/library_scripts.sh | 0
 archive/{ => src}/btm/README.md | 0
 archive/{ => src}/btm/devcontainer-feature.json | 0
 archive/{ => src}/btm/install.sh | 0
 archive/{ => src}/btm/library_scripts.sh | 0
 archive/{ => src}/cert-manager/README.md | 0
 archive/{ => src}/cert-manager/devcontainer-feature.json | 0
 archive/{ => src}/cert-manager/install.sh | 0
 archive/{ => src}/cert-manager/library_scripts.sh | 0
 archive/{ => src}/cmctl-asdf/README.md | 0
 archive/{ => src}/cmctl-asdf/devcontainer-feature.json | 0
 archive/{ => src}/cmctl-asdf/install.sh | 0
 archive/{ => src}/cmctl-asdf/library_scripts.sh | 0
 archive/{ => src}/codenotary-cas/README.md | 0
 archive/{ => src}/codenotary-cas/devcontainer-feature.json | 0
 archive/{ => src}/codenotary-cas/install.sh | 0
 archive/{ => src}/codenotary-cas/library_scripts.sh | 0
 archive/{ => src}/croc/README.md | 0
 archive/{ => src}/croc/devcontainer-feature.json | 0
 archive/{ => src}/croc/install.sh | 0
 archive/{ => src}/croc/library_scripts.sh | 0
 archive/{ => src}/cue-asdf/README.md | 0
 archive/{ => src}/cue-asdf/devcontainer-feature.json | 0
 archive/{ =>
src}/cue-asdf/install.sh | 0 archive/{ => src}/cue-asdf/library_scripts.sh | 0 archive/{ => src}/edge-impulse-cli/README.md | 0 archive/{ => src}/edge-impulse-cli/devcontainer-feature.json | 0 archive/{ => src}/edge-impulse-cli/install.sh | 0 archive/{ => src}/edge-impulse-cli/library_scripts.sh | 0 archive/{ => src}/elixir-asdf/README.md | 0 archive/{ => src}/elixir-asdf/devcontainer-feature.json | 0 archive/{ => src}/elixir-asdf/install.sh | 0 archive/{ => src}/elixir-asdf/library_scripts.sh | 0 archive/{ => src}/erlang-asdf/README.md | 0 archive/{ => src}/erlang-asdf/devcontainer-feature.json | 0 archive/{ => src}/erlang-asdf/install.sh | 0 archive/{ => src}/erlang-asdf/library_scripts.sh | 0 archive/{ => src}/ffmpeg-homebrew/README.md | 0 archive/{ => src}/ffmpeg-homebrew/devcontainer-feature.json | 0 archive/{ => src}/ffmpeg-homebrew/install.sh | 0 archive/{ => src}/ffmpeg-homebrew/library_scripts.sh | 0 archive/{ => src}/gh-release/README.md | 0 archive/{ => src}/gh-release/devcontainer-feature.json | 0 archive/{ => src}/gh-release/install.sh | 0 archive/{ => src}/gh-release/library_scripts.sh | 0 archive/{ => src}/homebrew-package/README.md | 0 archive/{ => src}/homebrew-package/devcontainer-feature.json | 0 archive/{ => src}/homebrew-package/install.sh | 0 archive/{ => src}/homebrew-package/library_scripts.sh | 0 archive/{ => src}/ko/README.md | 0 archive/{ => src}/ko/devcontainer-feature.json | 0 archive/{ => src}/ko/install.sh | 0 archive/{ => src}/ko/library_scripts.sh | 0 archive/{ => src}/kubescape/README.md | 0 archive/{ => src}/kubescape/devcontainer-feature.json | 0 archive/{ => src}/kubescape/install.sh | 0 archive/{ => src}/kubescape/library_scripts.sh | 0 archive/{ => src}/mage/README.md | 0 archive/{ => src}/mage/devcontainer-feature.json | 0 archive/{ => src}/mage/install.sh | 0 archive/{ => src}/mage/library_scripts.sh | 0 archive/{ => src}/meson-asdf/README.md | 0 archive/{ => src}/meson-asdf/devcontainer-feature.json | 0 archive/{ => src}/meson-asdf/install.sh | 0 archive/{ => src}/meson-asdf/library_scripts.sh | 0 archive/{ => src}/meteor-cli/README.md | 0 archive/{ => src}/meteor-cli/devcontainer-feature.json | 0 archive/{ => src}/meteor-cli/install.sh | 0 archive/{ => src}/meteor-cli/library_scripts.sh | 0 archive/{ => src}/nushell/README.md | 0 archive/{ => src}/nushell/devcontainer-feature.json | 0 archive/{ => src}/nushell/install.sh | 0 archive/{ => src}/nushell/library_scripts.sh | 0 archive/{ => src}/ory-keto/README.md | 0 archive/{ => src}/ory-keto/devcontainer-feature.json | 0 archive/{ => src}/ory-keto/install.sh | 0 archive/{ => src}/ory-keto/library_scripts.sh | 0 archive/{ => src}/pip-audit/README.md | 0 archive/{ => src}/pip-audit/devcontainer-feature.json | 0 archive/{ => src}/pip-audit/install.sh | 0 archive/{ => src}/pip-audit/library_scripts.sh | 0 archive/{ => src}/porter/README.md | 0 archive/{ => src}/porter/devcontainer-feature.json | 0 archive/{ => src}/porter/install.sh | 0 archive/{ => src}/porter/library_scripts.sh | 0 archive/{ => src}/postgres-asdf/README.md | 0 archive/{ => src}/postgres-asdf/devcontainer-feature.json | 0 archive/{ => src}/postgres-asdf/install.sh | 0 archive/{ => src}/postgres-asdf/library_scripts.sh | 0 archive/{ => src}/rabbitmq-asdf/README.md | 0 archive/{ => src}/rabbitmq-asdf/devcontainer-feature.json | 0 archive/{ => src}/rabbitmq-asdf/install.sh | 0 archive/{ => src}/rabbitmq-asdf/library_scripts.sh | 0 archive/{ => src}/scala-asdf/README.md | 0 archive/{ => src}/scala-asdf/devcontainer-feature.json | 0 
archive/{ => src}/scala-asdf/install.sh | 0 archive/{ => src}/scala-asdf/library_scripts.sh | 0 archive/{ => src}/serverless/README.md | 0 archive/{ => src}/serverless/devcontainer-feature.json | 0 archive/{ => src}/serverless/install.sh | 0 archive/{ => src}/serverless/library_scripts.sh | 0 archive/{ => src}/sqlfmt/README.md | 0 archive/{ => src}/sqlfmt/devcontainer-feature.json | 0 archive/{ => src}/sqlfmt/install.sh | 0 archive/{ => src}/sqlfmt/library_scripts.sh | 0 archive/{ => src}/syncthing/README.md | 0 archive/{ => src}/syncthing/devcontainer-feature.json | 0 archive/{ => src}/syncthing/install.sh | 0 archive/{ => src}/syncthing/library_scripts.sh | 0 archive/{ => src}/trivy/README.md | 0 archive/{ => src}/trivy/devcontainer-feature.json | 0 archive/{ => src}/trivy/install.sh | 0 archive/{ => src}/trivy/library_scripts.sh | 0 archive/{ => src}/vertx-sdkman/README.md | 0 archive/{ => src}/vertx-sdkman/devcontainer-feature.json | 0 archive/{ => src}/vertx-sdkman/install.sh | 0 archive/{ => src}/vertx-sdkman/library_scripts.sh | 0 archive/{ => src}/visualvm-sdkman/README.md | 0 archive/{ => src}/visualvm-sdkman/devcontainer-feature.json | 0 archive/{ => src}/visualvm-sdkman/install.sh | 0 archive/{ => src}/visualvm-sdkman/library_scripts.sh | 0 archive/{ => src}/volta/README.md | 0 archive/{ => src}/volta/devcontainer-feature.json | 0 archive/{ => src}/volta/install.sh | 0 archive/{ => src}/volta/library_scripts.sh | 0 archive/{ => src}/xplr/README.md | 0 archive/{ => src}/xplr/devcontainer-feature.json | 0 archive/{ => src}/xplr/install.sh | 0 archive/{ => src}/xplr/library_scripts.sh | 0 {test => archive/test}/actions-runner-noexternals/scenarios.json | 0 .../test}/actions-runner-noexternals/test_defaults_debian.sh | 0 .../test}/actions-runner-noruntime-noexternals/scenarios.json | 0 .../actions-runner-noruntime-noexternals/test_defaults_debian.sh | 0 {test => archive/test}/actions-runner-noruntime/scenarios.json | 0 .../test}/actions-runner-noruntime/test_defaults_debian.sh | 0 {test => archive/test}/age-keygen/scenarios.json | 0 {test => archive/test}/age-keygen/test_defaults_debian.sh | 0 {test => archive/test}/age/scenarios.json | 0 {test => archive/test}/age/test_defaults_debian.sh | 0 {test => archive/test}/airplane-cli/scenarios.json | 0 {test => archive/test}/airplane-cli/test_defaults_debian.sh | 0 {test => archive/test}/bitwarden-cli/scenarios.json | 0 {test => archive/test}/bitwarden-cli/test_defaults_debian.sh | 0 {test => archive/test}/boundary-asdf/scenarios.json | 0 {test => archive/test}/boundary-asdf/test.sh | 0 {test => archive/test}/btm/scenarios.json | 0 {test => archive/test}/btm/test_defaults_debian.sh | 0 {test => archive/test}/cert-manager/scenarios.json | 0 {test => archive/test}/cert-manager/test_defaults_debian.sh | 0 {test => archive/test}/cmctl-asdf/scenarios.json | 0 {test => archive/test}/cmctl-asdf/test.sh | 0 {test => archive/test}/codenotary-cas/scenarios.json | 0 {test => archive/test}/codenotary-cas/test_defaults_debian.sh | 0 {test => archive/test}/croc/scenarios.json | 0 {test => archive/test}/croc/test_defaults_debian.sh | 0 {test => archive/test}/cue-asdf/scenarios.json | 0 {test => archive/test}/cue-asdf/test.sh | 0 {test => archive/test}/edge-impulse-cli/scenarios.json | 0 {test => archive/test}/edge-impulse-cli/test.sh | 0 {test => archive/test}/elixir-asdf/scenarios.json | 0 {test => archive/test}/elixir-asdf/test_debian_11.sh | 0 {test => archive/test}/elixir-asdf/test_debian_12.sh | 0 {test => 
archive/test}/elixir-asdf/test_ubuntu_2004.sh | 0 {test => archive/test}/elixir-asdf/test_ubuntu_2204.sh | 0 {test => archive/test}/erlang-asdf/scenarios.json | 0 {test => archive/test}/erlang-asdf/test.sh | 0 {test => archive/test}/erlang-asdf/test_debian_bullseye.sh | 0 {test => archive/test}/erlang-asdf/test_ubuntu.sh | 0 {test => archive/test}/ffmpeg-homebrew/scenarios.json | 0 {test => archive/test}/ffmpeg-homebrew/test.sh | 0 {test => archive/test}/gh-release/scenarios.json | 0 {test => archive/test}/gh-release/test_act.sh | 0 {test => archive/test}/gh-release/test_apiops.sh | 0 {test => archive/test}/gh-release/test_defaults_alpine.sh | 0 {test => archive/test}/gh-release/test_defaults_debian.sh | 0 {test => archive/test}/gh-release/test_etcd.sh | 0 {test => archive/test}/gh-release/test_no_git.sh | 0 {test => archive/test}/gh-release/test_powershell.sh | 0 {test => archive/test}/homebrew-package/scenarios.json | 0 {test => archive/test}/homebrew-package/test_file_limit.sh | 0 {test => archive/test}/homebrew-package/test_git_based_version.sh | 0 {test => archive/test}/homebrew-package/test_latest.sh | 0 {test => archive/test}/homebrew-package/test_specific_version.sh | 0 {test => archive/test}/homebrew-package/test_universal.sh | 0 {test => archive/test}/ko/scenarios.json | 0 {test => archive/test}/ko/test_defaults_debian.sh | 0 {test => archive/test}/kubescape/scenarios.json | 0 {test => archive/test}/kubescape/test_defaults_debian.sh | 0 {test => archive/test}/mage/scenarios.json | 0 {test => archive/test}/mage/test_defaults_debian.sh | 0 {test => archive/test}/meson-asdf/scenarios.json | 0 {test => archive/test}/meson-asdf/test.sh | 0 {test => archive/test}/meteor-cli/scenarios.json | 0 {test => archive/test}/meteor-cli/test.sh | 0 {test => archive/test}/nushell/scenarios.json | 0 {test => archive/test}/nushell/test_defaults_debian.sh | 0 {test => archive/test}/ory-keto/scenarios.json | 0 {test => archive/test}/ory-keto/test_defaults_debian.sh | 0 {test => archive/test}/pip-audit/scenarios.json | 0 {test => archive/test}/pip-audit/test_defaults.sh | 0 {test => archive/test}/porter/install_mixins.sh | 0 {test => archive/test}/porter/scenarios.json | 0 {test => archive/test}/porter/test.sh | 0 {test => archive/test}/postgres-asdf/scenarios.json | 0 {test => archive/test}/postgres-asdf/test.sh | 0 {test => archive/test}/rabbitmq-asdf/scenarios.json | 0 {test => archive/test}/rabbitmq-asdf/test.sh | 0 {test => archive/test}/scala-asdf/scenarios.json | 0 {test => archive/test}/scala-asdf/test.sh | 0 {test => archive/test}/serverless/scenarios.json | 0 {test => archive/test}/serverless/test.sh | 0 {test => archive/test}/sqlfmt/scenarios.json | 0 {test => archive/test}/sqlfmt/test.sh | 0 {test => archive/test}/syncthing/scenarios.json | 0 {test => archive/test}/syncthing/test_defaults_debian.sh | 0 {test => archive/test}/trivy/scenarios.json | 0 {test => archive/test}/trivy/test_defaults_debian.sh | 0 {test => archive/test}/vertx-sdkman/scenarios.json | 0 {test => archive/test}/vertx-sdkman/test.sh | 0 {test => archive/test}/visualvm-sdkman/scenarios.json | 0 {test => archive/test}/visualvm-sdkman/test.sh | 0 {test => archive/test}/volta/scenarios.json | 0 {test => archive/test}/volta/test_defaults_debian.sh | 0 {test => archive/test}/xplr/scenarios.json | 0 {test => archive/test}/xplr/test_defaults_debian.sh | 0 256 files changed, 0 insertions(+), 0 deletions(-) rename archive/{ => src}/actions-runner-noexternals/README.md (100%) rename archive/{ => 
src}/actions-runner-noexternals/devcontainer-feature.json (100%) rename archive/{ => src}/actions-runner-noexternals/install.sh (100%) rename archive/{ => src}/actions-runner-noexternals/library_scripts.sh (100%) rename archive/{ => src}/actions-runner-noruntime-noexternals/README.md (100%) rename archive/{ => src}/actions-runner-noruntime-noexternals/devcontainer-feature.json (100%) rename archive/{ => src}/actions-runner-noruntime-noexternals/install.sh (100%) rename archive/{ => src}/actions-runner-noruntime-noexternals/library_scripts.sh (100%) rename archive/{ => src}/actions-runner-noruntime/README.md (100%) rename archive/{ => src}/actions-runner-noruntime/devcontainer-feature.json (100%) rename archive/{ => src}/actions-runner-noruntime/install.sh (100%) rename archive/{ => src}/actions-runner-noruntime/library_scripts.sh (100%) rename archive/{ => src}/age-keygen/README.md (100%) rename archive/{ => src}/age-keygen/devcontainer-feature.json (100%) rename archive/{ => src}/age-keygen/install.sh (100%) rename archive/{ => src}/age-keygen/library_scripts.sh (100%) rename archive/{ => src}/age/README.md (100%) rename archive/{ => src}/age/devcontainer-feature.json (100%) rename archive/{ => src}/age/install.sh (100%) rename archive/{ => src}/age/library_scripts.sh (100%) rename archive/{ => src}/airplane-cli/README.md (100%) rename archive/{ => src}/airplane-cli/devcontainer-feature.json (100%) rename archive/{ => src}/airplane-cli/install.sh (100%) rename archive/{ => src}/airplane-cli/library_scripts.sh (100%) rename archive/{ => src}/bitwarden-cli/README.md (100%) rename archive/{ => src}/bitwarden-cli/devcontainer-feature.json (100%) rename archive/{ => src}/bitwarden-cli/install.sh (100%) rename archive/{ => src}/bitwarden-cli/library_scripts.sh (100%) rename archive/{ => src}/boundary-asdf/README.md (100%) rename archive/{ => src}/boundary-asdf/devcontainer-feature.json (100%) rename archive/{ => src}/boundary-asdf/install.sh (100%) rename archive/{ => src}/boundary-asdf/library_scripts.sh (100%) rename archive/{ => src}/btm/README.md (100%) rename archive/{ => src}/btm/devcontainer-feature.json (100%) rename archive/{ => src}/btm/install.sh (100%) rename archive/{ => src}/btm/library_scripts.sh (100%) rename archive/{ => src}/cert-manager/README.md (100%) rename archive/{ => src}/cert-manager/devcontainer-feature.json (100%) rename archive/{ => src}/cert-manager/install.sh (100%) rename archive/{ => src}/cert-manager/library_scripts.sh (100%) rename archive/{ => src}/cmctl-asdf/README.md (100%) rename archive/{ => src}/cmctl-asdf/devcontainer-feature.json (100%) rename archive/{ => src}/cmctl-asdf/install.sh (100%) rename archive/{ => src}/cmctl-asdf/library_scripts.sh (100%) rename archive/{ => src}/codenotary-cas/README.md (100%) rename archive/{ => src}/codenotary-cas/devcontainer-feature.json (100%) rename archive/{ => src}/codenotary-cas/install.sh (100%) rename archive/{ => src}/codenotary-cas/library_scripts.sh (100%) rename archive/{ => src}/croc/README.md (100%) rename archive/{ => src}/croc/devcontainer-feature.json (100%) rename archive/{ => src}/croc/install.sh (100%) rename archive/{ => src}/croc/library_scripts.sh (100%) rename archive/{ => src}/cue-asdf/README.md (100%) rename archive/{ => src}/cue-asdf/devcontainer-feature.json (100%) rename archive/{ => src}/cue-asdf/install.sh (100%) rename archive/{ => src}/cue-asdf/library_scripts.sh (100%) rename archive/{ => src}/edge-impulse-cli/README.md (100%) rename archive/{ => 
src}/edge-impulse-cli/devcontainer-feature.json (100%) rename archive/{ => src}/edge-impulse-cli/install.sh (100%) rename archive/{ => src}/edge-impulse-cli/library_scripts.sh (100%) rename archive/{ => src}/elixir-asdf/README.md (100%) rename archive/{ => src}/elixir-asdf/devcontainer-feature.json (100%) rename archive/{ => src}/elixir-asdf/install.sh (100%) rename archive/{ => src}/elixir-asdf/library_scripts.sh (100%) rename archive/{ => src}/erlang-asdf/README.md (100%) rename archive/{ => src}/erlang-asdf/devcontainer-feature.json (100%) rename archive/{ => src}/erlang-asdf/install.sh (100%) rename archive/{ => src}/erlang-asdf/library_scripts.sh (100%) rename archive/{ => src}/ffmpeg-homebrew/README.md (100%) rename archive/{ => src}/ffmpeg-homebrew/devcontainer-feature.json (100%) rename archive/{ => src}/ffmpeg-homebrew/install.sh (100%) rename archive/{ => src}/ffmpeg-homebrew/library_scripts.sh (100%) rename archive/{ => src}/gh-release/README.md (100%) rename archive/{ => src}/gh-release/devcontainer-feature.json (100%) rename archive/{ => src}/gh-release/install.sh (100%) rename archive/{ => src}/gh-release/library_scripts.sh (100%) rename archive/{ => src}/homebrew-package/README.md (100%) rename archive/{ => src}/homebrew-package/devcontainer-feature.json (100%) rename archive/{ => src}/homebrew-package/install.sh (100%) rename archive/{ => src}/homebrew-package/library_scripts.sh (100%) rename archive/{ => src}/ko/README.md (100%) rename archive/{ => src}/ko/devcontainer-feature.json (100%) rename archive/{ => src}/ko/install.sh (100%) rename archive/{ => src}/ko/library_scripts.sh (100%) rename archive/{ => src}/kubescape/README.md (100%) rename archive/{ => src}/kubescape/devcontainer-feature.json (100%) rename archive/{ => src}/kubescape/install.sh (100%) rename archive/{ => src}/kubescape/library_scripts.sh (100%) rename archive/{ => src}/mage/README.md (100%) rename archive/{ => src}/mage/devcontainer-feature.json (100%) rename archive/{ => src}/mage/install.sh (100%) rename archive/{ => src}/mage/library_scripts.sh (100%) rename archive/{ => src}/meson-asdf/README.md (100%) rename archive/{ => src}/meson-asdf/devcontainer-feature.json (100%) rename archive/{ => src}/meson-asdf/install.sh (100%) rename archive/{ => src}/meson-asdf/library_scripts.sh (100%) rename archive/{ => src}/meteor-cli/README.md (100%) rename archive/{ => src}/meteor-cli/devcontainer-feature.json (100%) rename archive/{ => src}/meteor-cli/install.sh (100%) rename archive/{ => src}/meteor-cli/library_scripts.sh (100%) rename archive/{ => src}/nushell/README.md (100%) rename archive/{ => src}/nushell/devcontainer-feature.json (100%) rename archive/{ => src}/nushell/install.sh (100%) rename archive/{ => src}/nushell/library_scripts.sh (100%) rename archive/{ => src}/ory-keto/README.md (100%) rename archive/{ => src}/ory-keto/devcontainer-feature.json (100%) rename archive/{ => src}/ory-keto/install.sh (100%) rename archive/{ => src}/ory-keto/library_scripts.sh (100%) rename archive/{ => src}/pip-audit/README.md (100%) rename archive/{ => src}/pip-audit/devcontainer-feature.json (100%) rename archive/{ => src}/pip-audit/install.sh (100%) rename archive/{ => src}/pip-audit/library_scripts.sh (100%) rename archive/{ => src}/porter/README.md (100%) rename archive/{ => src}/porter/devcontainer-feature.json (100%) rename archive/{ => src}/porter/install.sh (100%) rename archive/{ => src}/porter/library_scripts.sh (100%) rename archive/{ => src}/postgres-asdf/README.md (100%) rename archive/{ => 
src}/postgres-asdf/devcontainer-feature.json (100%) rename archive/{ => src}/postgres-asdf/install.sh (100%) rename archive/{ => src}/postgres-asdf/library_scripts.sh (100%) rename archive/{ => src}/rabbitmq-asdf/README.md (100%) rename archive/{ => src}/rabbitmq-asdf/devcontainer-feature.json (100%) rename archive/{ => src}/rabbitmq-asdf/install.sh (100%) rename archive/{ => src}/rabbitmq-asdf/library_scripts.sh (100%) rename archive/{ => src}/scala-asdf/README.md (100%) rename archive/{ => src}/scala-asdf/devcontainer-feature.json (100%) rename archive/{ => src}/scala-asdf/install.sh (100%) rename archive/{ => src}/scala-asdf/library_scripts.sh (100%) rename archive/{ => src}/serverless/README.md (100%) rename archive/{ => src}/serverless/devcontainer-feature.json (100%) rename archive/{ => src}/serverless/install.sh (100%) rename archive/{ => src}/serverless/library_scripts.sh (100%) rename archive/{ => src}/sqlfmt/README.md (100%) rename archive/{ => src}/sqlfmt/devcontainer-feature.json (100%) rename archive/{ => src}/sqlfmt/install.sh (100%) rename archive/{ => src}/sqlfmt/library_scripts.sh (100%) rename archive/{ => src}/syncthing/README.md (100%) rename archive/{ => src}/syncthing/devcontainer-feature.json (100%) rename archive/{ => src}/syncthing/install.sh (100%) rename archive/{ => src}/syncthing/library_scripts.sh (100%) rename archive/{ => src}/trivy/README.md (100%) rename archive/{ => src}/trivy/devcontainer-feature.json (100%) rename archive/{ => src}/trivy/install.sh (100%) rename archive/{ => src}/trivy/library_scripts.sh (100%) rename archive/{ => src}/vertx-sdkman/README.md (100%) rename archive/{ => src}/vertx-sdkman/devcontainer-feature.json (100%) rename archive/{ => src}/vertx-sdkman/install.sh (100%) rename archive/{ => src}/vertx-sdkman/library_scripts.sh (100%) rename archive/{ => src}/visualvm-sdkman/README.md (100%) rename archive/{ => src}/visualvm-sdkman/devcontainer-feature.json (100%) rename archive/{ => src}/visualvm-sdkman/install.sh (100%) rename archive/{ => src}/visualvm-sdkman/library_scripts.sh (100%) rename archive/{ => src}/volta/README.md (100%) rename archive/{ => src}/volta/devcontainer-feature.json (100%) rename archive/{ => src}/volta/install.sh (100%) rename archive/{ => src}/volta/library_scripts.sh (100%) rename archive/{ => src}/xplr/README.md (100%) rename archive/{ => src}/xplr/devcontainer-feature.json (100%) rename archive/{ => src}/xplr/install.sh (100%) rename archive/{ => src}/xplr/library_scripts.sh (100%) rename {test => archive/test}/actions-runner-noexternals/scenarios.json (100%) rename {test => archive/test}/actions-runner-noexternals/test_defaults_debian.sh (100%) rename {test => archive/test}/actions-runner-noruntime-noexternals/scenarios.json (100%) rename {test => archive/test}/actions-runner-noruntime-noexternals/test_defaults_debian.sh (100%) rename {test => archive/test}/actions-runner-noruntime/scenarios.json (100%) rename {test => archive/test}/actions-runner-noruntime/test_defaults_debian.sh (100%) rename {test => archive/test}/age-keygen/scenarios.json (100%) rename {test => archive/test}/age-keygen/test_defaults_debian.sh (100%) rename {test => archive/test}/age/scenarios.json (100%) rename {test => archive/test}/age/test_defaults_debian.sh (100%) rename {test => archive/test}/airplane-cli/scenarios.json (100%) rename {test => archive/test}/airplane-cli/test_defaults_debian.sh (100%) rename {test => archive/test}/bitwarden-cli/scenarios.json (100%) rename {test => 
archive/test}/bitwarden-cli/test_defaults_debian.sh (100%) rename {test => archive/test}/boundary-asdf/scenarios.json (100%) rename {test => archive/test}/boundary-asdf/test.sh (100%) rename {test => archive/test}/btm/scenarios.json (100%) rename {test => archive/test}/btm/test_defaults_debian.sh (100%) rename {test => archive/test}/cert-manager/scenarios.json (100%) rename {test => archive/test}/cert-manager/test_defaults_debian.sh (100%) rename {test => archive/test}/cmctl-asdf/scenarios.json (100%) rename {test => archive/test}/cmctl-asdf/test.sh (100%) rename {test => archive/test}/codenotary-cas/scenarios.json (100%) rename {test => archive/test}/codenotary-cas/test_defaults_debian.sh (100%) rename {test => archive/test}/croc/scenarios.json (100%) rename {test => archive/test}/croc/test_defaults_debian.sh (100%) rename {test => archive/test}/cue-asdf/scenarios.json (100%) rename {test => archive/test}/cue-asdf/test.sh (100%) rename {test => archive/test}/edge-impulse-cli/scenarios.json (100%) rename {test => archive/test}/edge-impulse-cli/test.sh (100%) rename {test => archive/test}/elixir-asdf/scenarios.json (100%) rename {test => archive/test}/elixir-asdf/test_debian_11.sh (100%) rename {test => archive/test}/elixir-asdf/test_debian_12.sh (100%) rename {test => archive/test}/elixir-asdf/test_ubuntu_2004.sh (100%) rename {test => archive/test}/elixir-asdf/test_ubuntu_2204.sh (100%) rename {test => archive/test}/erlang-asdf/scenarios.json (100%) rename {test => archive/test}/erlang-asdf/test.sh (100%) rename {test => archive/test}/erlang-asdf/test_debian_bullseye.sh (100%) rename {test => archive/test}/erlang-asdf/test_ubuntu.sh (100%) rename {test => archive/test}/ffmpeg-homebrew/scenarios.json (100%) rename {test => archive/test}/ffmpeg-homebrew/test.sh (100%) rename {test => archive/test}/gh-release/scenarios.json (100%) rename {test => archive/test}/gh-release/test_act.sh (100%) rename {test => archive/test}/gh-release/test_apiops.sh (100%) rename {test => archive/test}/gh-release/test_defaults_alpine.sh (100%) rename {test => archive/test}/gh-release/test_defaults_debian.sh (100%) rename {test => archive/test}/gh-release/test_etcd.sh (100%) rename {test => archive/test}/gh-release/test_no_git.sh (100%) rename {test => archive/test}/gh-release/test_powershell.sh (100%) rename {test => archive/test}/homebrew-package/scenarios.json (100%) rename {test => archive/test}/homebrew-package/test_file_limit.sh (100%) rename {test => archive/test}/homebrew-package/test_git_based_version.sh (100%) rename {test => archive/test}/homebrew-package/test_latest.sh (100%) rename {test => archive/test}/homebrew-package/test_specific_version.sh (100%) rename {test => archive/test}/homebrew-package/test_universal.sh (100%) rename {test => archive/test}/ko/scenarios.json (100%) rename {test => archive/test}/ko/test_defaults_debian.sh (100%) rename {test => archive/test}/kubescape/scenarios.json (100%) rename {test => archive/test}/kubescape/test_defaults_debian.sh (100%) rename {test => archive/test}/mage/scenarios.json (100%) rename {test => archive/test}/mage/test_defaults_debian.sh (100%) rename {test => archive/test}/meson-asdf/scenarios.json (100%) rename {test => archive/test}/meson-asdf/test.sh (100%) rename {test => archive/test}/meteor-cli/scenarios.json (100%) rename {test => archive/test}/meteor-cli/test.sh (100%) rename {test => archive/test}/nushell/scenarios.json (100%) rename {test => archive/test}/nushell/test_defaults_debian.sh (100%) rename {test => 
archive/test}/ory-keto/scenarios.json (100%) rename {test => archive/test}/ory-keto/test_defaults_debian.sh (100%) rename {test => archive/test}/pip-audit/scenarios.json (100%) rename {test => archive/test}/pip-audit/test_defaults.sh (100%) rename {test => archive/test}/porter/install_mixins.sh (100%) rename {test => archive/test}/porter/scenarios.json (100%) rename {test => archive/test}/porter/test.sh (100%) rename {test => archive/test}/postgres-asdf/scenarios.json (100%) rename {test => archive/test}/postgres-asdf/test.sh (100%) rename {test => archive/test}/rabbitmq-asdf/scenarios.json (100%) rename {test => archive/test}/rabbitmq-asdf/test.sh (100%) rename {test => archive/test}/scala-asdf/scenarios.json (100%) rename {test => archive/test}/scala-asdf/test.sh (100%) rename {test => archive/test}/serverless/scenarios.json (100%) rename {test => archive/test}/serverless/test.sh (100%) rename {test => archive/test}/sqlfmt/scenarios.json (100%) rename {test => archive/test}/sqlfmt/test.sh (100%) rename {test => archive/test}/syncthing/scenarios.json (100%) rename {test => archive/test}/syncthing/test_defaults_debian.sh (100%) rename {test => archive/test}/trivy/scenarios.json (100%) rename {test => archive/test}/trivy/test_defaults_debian.sh (100%) rename {test => archive/test}/vertx-sdkman/scenarios.json (100%) rename {test => archive/test}/vertx-sdkman/test.sh (100%) rename {test => archive/test}/visualvm-sdkman/scenarios.json (100%) rename {test => archive/test}/visualvm-sdkman/test.sh (100%) rename {test => archive/test}/volta/scenarios.json (100%) rename {test => archive/test}/volta/test_defaults_debian.sh (100%) rename {test => archive/test}/xplr/scenarios.json (100%) rename {test => archive/test}/xplr/test_defaults_debian.sh (100%) diff --git a/archive/actions-runner-noexternals/README.md b/archive/src/actions-runner-noexternals/README.md similarity index 100% rename from archive/actions-runner-noexternals/README.md rename to archive/src/actions-runner-noexternals/README.md diff --git a/archive/actions-runner-noexternals/devcontainer-feature.json b/archive/src/actions-runner-noexternals/devcontainer-feature.json similarity index 100% rename from archive/actions-runner-noexternals/devcontainer-feature.json rename to archive/src/actions-runner-noexternals/devcontainer-feature.json diff --git a/archive/actions-runner-noexternals/install.sh b/archive/src/actions-runner-noexternals/install.sh similarity index 100% rename from archive/actions-runner-noexternals/install.sh rename to archive/src/actions-runner-noexternals/install.sh diff --git a/archive/actions-runner-noexternals/library_scripts.sh b/archive/src/actions-runner-noexternals/library_scripts.sh similarity index 100% rename from archive/actions-runner-noexternals/library_scripts.sh rename to archive/src/actions-runner-noexternals/library_scripts.sh diff --git a/archive/actions-runner-noruntime-noexternals/README.md b/archive/src/actions-runner-noruntime-noexternals/README.md similarity index 100% rename from archive/actions-runner-noruntime-noexternals/README.md rename to archive/src/actions-runner-noruntime-noexternals/README.md diff --git a/archive/actions-runner-noruntime-noexternals/devcontainer-feature.json b/archive/src/actions-runner-noruntime-noexternals/devcontainer-feature.json similarity index 100% rename from archive/actions-runner-noruntime-noexternals/devcontainer-feature.json rename to archive/src/actions-runner-noruntime-noexternals/devcontainer-feature.json diff --git 
a/archive/actions-runner-noruntime-noexternals/install.sh b/archive/src/actions-runner-noruntime-noexternals/install.sh similarity index 100% rename from archive/actions-runner-noruntime-noexternals/install.sh rename to archive/src/actions-runner-noruntime-noexternals/install.sh diff --git a/archive/actions-runner-noruntime-noexternals/library_scripts.sh b/archive/src/actions-runner-noruntime-noexternals/library_scripts.sh similarity index 100% rename from archive/actions-runner-noruntime-noexternals/library_scripts.sh rename to archive/src/actions-runner-noruntime-noexternals/library_scripts.sh diff --git a/archive/actions-runner-noruntime/README.md b/archive/src/actions-runner-noruntime/README.md similarity index 100% rename from archive/actions-runner-noruntime/README.md rename to archive/src/actions-runner-noruntime/README.md diff --git a/archive/actions-runner-noruntime/devcontainer-feature.json b/archive/src/actions-runner-noruntime/devcontainer-feature.json similarity index 100% rename from archive/actions-runner-noruntime/devcontainer-feature.json rename to archive/src/actions-runner-noruntime/devcontainer-feature.json diff --git a/archive/actions-runner-noruntime/install.sh b/archive/src/actions-runner-noruntime/install.sh similarity index 100% rename from archive/actions-runner-noruntime/install.sh rename to archive/src/actions-runner-noruntime/install.sh diff --git a/archive/actions-runner-noruntime/library_scripts.sh b/archive/src/actions-runner-noruntime/library_scripts.sh similarity index 100% rename from archive/actions-runner-noruntime/library_scripts.sh rename to archive/src/actions-runner-noruntime/library_scripts.sh diff --git a/archive/age-keygen/README.md b/archive/src/age-keygen/README.md similarity index 100% rename from archive/age-keygen/README.md rename to archive/src/age-keygen/README.md diff --git a/archive/age-keygen/devcontainer-feature.json b/archive/src/age-keygen/devcontainer-feature.json similarity index 100% rename from archive/age-keygen/devcontainer-feature.json rename to archive/src/age-keygen/devcontainer-feature.json diff --git a/archive/age-keygen/install.sh b/archive/src/age-keygen/install.sh similarity index 100% rename from archive/age-keygen/install.sh rename to archive/src/age-keygen/install.sh diff --git a/archive/age-keygen/library_scripts.sh b/archive/src/age-keygen/library_scripts.sh similarity index 100% rename from archive/age-keygen/library_scripts.sh rename to archive/src/age-keygen/library_scripts.sh diff --git a/archive/age/README.md b/archive/src/age/README.md similarity index 100% rename from archive/age/README.md rename to archive/src/age/README.md diff --git a/archive/age/devcontainer-feature.json b/archive/src/age/devcontainer-feature.json similarity index 100% rename from archive/age/devcontainer-feature.json rename to archive/src/age/devcontainer-feature.json diff --git a/archive/age/install.sh b/archive/src/age/install.sh similarity index 100% rename from archive/age/install.sh rename to archive/src/age/install.sh diff --git a/archive/age/library_scripts.sh b/archive/src/age/library_scripts.sh similarity index 100% rename from archive/age/library_scripts.sh rename to archive/src/age/library_scripts.sh diff --git a/archive/airplane-cli/README.md b/archive/src/airplane-cli/README.md similarity index 100% rename from archive/airplane-cli/README.md rename to archive/src/airplane-cli/README.md diff --git a/archive/airplane-cli/devcontainer-feature.json b/archive/src/airplane-cli/devcontainer-feature.json similarity index 100% 
rename from archive/airplane-cli/devcontainer-feature.json rename to archive/src/airplane-cli/devcontainer-feature.json diff --git a/archive/airplane-cli/install.sh b/archive/src/airplane-cli/install.sh similarity index 100% rename from archive/airplane-cli/install.sh rename to archive/src/airplane-cli/install.sh diff --git a/archive/airplane-cli/library_scripts.sh b/archive/src/airplane-cli/library_scripts.sh similarity index 100% rename from archive/airplane-cli/library_scripts.sh rename to archive/src/airplane-cli/library_scripts.sh diff --git a/archive/bitwarden-cli/README.md b/archive/src/bitwarden-cli/README.md similarity index 100% rename from archive/bitwarden-cli/README.md rename to archive/src/bitwarden-cli/README.md diff --git a/archive/bitwarden-cli/devcontainer-feature.json b/archive/src/bitwarden-cli/devcontainer-feature.json similarity index 100% rename from archive/bitwarden-cli/devcontainer-feature.json rename to archive/src/bitwarden-cli/devcontainer-feature.json diff --git a/archive/bitwarden-cli/install.sh b/archive/src/bitwarden-cli/install.sh similarity index 100% rename from archive/bitwarden-cli/install.sh rename to archive/src/bitwarden-cli/install.sh diff --git a/archive/bitwarden-cli/library_scripts.sh b/archive/src/bitwarden-cli/library_scripts.sh similarity index 100% rename from archive/bitwarden-cli/library_scripts.sh rename to archive/src/bitwarden-cli/library_scripts.sh diff --git a/archive/boundary-asdf/README.md b/archive/src/boundary-asdf/README.md similarity index 100% rename from archive/boundary-asdf/README.md rename to archive/src/boundary-asdf/README.md diff --git a/archive/boundary-asdf/devcontainer-feature.json b/archive/src/boundary-asdf/devcontainer-feature.json similarity index 100% rename from archive/boundary-asdf/devcontainer-feature.json rename to archive/src/boundary-asdf/devcontainer-feature.json diff --git a/archive/boundary-asdf/install.sh b/archive/src/boundary-asdf/install.sh similarity index 100% rename from archive/boundary-asdf/install.sh rename to archive/src/boundary-asdf/install.sh diff --git a/archive/boundary-asdf/library_scripts.sh b/archive/src/boundary-asdf/library_scripts.sh similarity index 100% rename from archive/boundary-asdf/library_scripts.sh rename to archive/src/boundary-asdf/library_scripts.sh diff --git a/archive/btm/README.md b/archive/src/btm/README.md similarity index 100% rename from archive/btm/README.md rename to archive/src/btm/README.md diff --git a/archive/btm/devcontainer-feature.json b/archive/src/btm/devcontainer-feature.json similarity index 100% rename from archive/btm/devcontainer-feature.json rename to archive/src/btm/devcontainer-feature.json diff --git a/archive/btm/install.sh b/archive/src/btm/install.sh similarity index 100% rename from archive/btm/install.sh rename to archive/src/btm/install.sh diff --git a/archive/btm/library_scripts.sh b/archive/src/btm/library_scripts.sh similarity index 100% rename from archive/btm/library_scripts.sh rename to archive/src/btm/library_scripts.sh diff --git a/archive/cert-manager/README.md b/archive/src/cert-manager/README.md similarity index 100% rename from archive/cert-manager/README.md rename to archive/src/cert-manager/README.md diff --git a/archive/cert-manager/devcontainer-feature.json b/archive/src/cert-manager/devcontainer-feature.json similarity index 100% rename from archive/cert-manager/devcontainer-feature.json rename to archive/src/cert-manager/devcontainer-feature.json diff --git a/archive/cert-manager/install.sh 
b/archive/src/cert-manager/install.sh similarity index 100% rename from archive/cert-manager/install.sh rename to archive/src/cert-manager/install.sh diff --git a/archive/cert-manager/library_scripts.sh b/archive/src/cert-manager/library_scripts.sh similarity index 100% rename from archive/cert-manager/library_scripts.sh rename to archive/src/cert-manager/library_scripts.sh diff --git a/archive/cmctl-asdf/README.md b/archive/src/cmctl-asdf/README.md similarity index 100% rename from archive/cmctl-asdf/README.md rename to archive/src/cmctl-asdf/README.md diff --git a/archive/cmctl-asdf/devcontainer-feature.json b/archive/src/cmctl-asdf/devcontainer-feature.json similarity index 100% rename from archive/cmctl-asdf/devcontainer-feature.json rename to archive/src/cmctl-asdf/devcontainer-feature.json diff --git a/archive/cmctl-asdf/install.sh b/archive/src/cmctl-asdf/install.sh similarity index 100% rename from archive/cmctl-asdf/install.sh rename to archive/src/cmctl-asdf/install.sh diff --git a/archive/cmctl-asdf/library_scripts.sh b/archive/src/cmctl-asdf/library_scripts.sh similarity index 100% rename from archive/cmctl-asdf/library_scripts.sh rename to archive/src/cmctl-asdf/library_scripts.sh diff --git a/archive/codenotary-cas/README.md b/archive/src/codenotary-cas/README.md similarity index 100% rename from archive/codenotary-cas/README.md rename to archive/src/codenotary-cas/README.md diff --git a/archive/codenotary-cas/devcontainer-feature.json b/archive/src/codenotary-cas/devcontainer-feature.json similarity index 100% rename from archive/codenotary-cas/devcontainer-feature.json rename to archive/src/codenotary-cas/devcontainer-feature.json diff --git a/archive/codenotary-cas/install.sh b/archive/src/codenotary-cas/install.sh similarity index 100% rename from archive/codenotary-cas/install.sh rename to archive/src/codenotary-cas/install.sh diff --git a/archive/codenotary-cas/library_scripts.sh b/archive/src/codenotary-cas/library_scripts.sh similarity index 100% rename from archive/codenotary-cas/library_scripts.sh rename to archive/src/codenotary-cas/library_scripts.sh diff --git a/archive/croc/README.md b/archive/src/croc/README.md similarity index 100% rename from archive/croc/README.md rename to archive/src/croc/README.md diff --git a/archive/croc/devcontainer-feature.json b/archive/src/croc/devcontainer-feature.json similarity index 100% rename from archive/croc/devcontainer-feature.json rename to archive/src/croc/devcontainer-feature.json diff --git a/archive/croc/install.sh b/archive/src/croc/install.sh similarity index 100% rename from archive/croc/install.sh rename to archive/src/croc/install.sh diff --git a/archive/croc/library_scripts.sh b/archive/src/croc/library_scripts.sh similarity index 100% rename from archive/croc/library_scripts.sh rename to archive/src/croc/library_scripts.sh diff --git a/archive/cue-asdf/README.md b/archive/src/cue-asdf/README.md similarity index 100% rename from archive/cue-asdf/README.md rename to archive/src/cue-asdf/README.md diff --git a/archive/cue-asdf/devcontainer-feature.json b/archive/src/cue-asdf/devcontainer-feature.json similarity index 100% rename from archive/cue-asdf/devcontainer-feature.json rename to archive/src/cue-asdf/devcontainer-feature.json diff --git a/archive/cue-asdf/install.sh b/archive/src/cue-asdf/install.sh similarity index 100% rename from archive/cue-asdf/install.sh rename to archive/src/cue-asdf/install.sh diff --git a/archive/cue-asdf/library_scripts.sh b/archive/src/cue-asdf/library_scripts.sh similarity index 
100% rename from archive/cue-asdf/library_scripts.sh rename to archive/src/cue-asdf/library_scripts.sh diff --git a/archive/edge-impulse-cli/README.md b/archive/src/edge-impulse-cli/README.md similarity index 100% rename from archive/edge-impulse-cli/README.md rename to archive/src/edge-impulse-cli/README.md diff --git a/archive/edge-impulse-cli/devcontainer-feature.json b/archive/src/edge-impulse-cli/devcontainer-feature.json similarity index 100% rename from archive/edge-impulse-cli/devcontainer-feature.json rename to archive/src/edge-impulse-cli/devcontainer-feature.json diff --git a/archive/edge-impulse-cli/install.sh b/archive/src/edge-impulse-cli/install.sh similarity index 100% rename from archive/edge-impulse-cli/install.sh rename to archive/src/edge-impulse-cli/install.sh diff --git a/archive/edge-impulse-cli/library_scripts.sh b/archive/src/edge-impulse-cli/library_scripts.sh similarity index 100% rename from archive/edge-impulse-cli/library_scripts.sh rename to archive/src/edge-impulse-cli/library_scripts.sh diff --git a/archive/elixir-asdf/README.md b/archive/src/elixir-asdf/README.md similarity index 100% rename from archive/elixir-asdf/README.md rename to archive/src/elixir-asdf/README.md diff --git a/archive/elixir-asdf/devcontainer-feature.json b/archive/src/elixir-asdf/devcontainer-feature.json similarity index 100% rename from archive/elixir-asdf/devcontainer-feature.json rename to archive/src/elixir-asdf/devcontainer-feature.json diff --git a/archive/elixir-asdf/install.sh b/archive/src/elixir-asdf/install.sh similarity index 100% rename from archive/elixir-asdf/install.sh rename to archive/src/elixir-asdf/install.sh diff --git a/archive/elixir-asdf/library_scripts.sh b/archive/src/elixir-asdf/library_scripts.sh similarity index 100% rename from archive/elixir-asdf/library_scripts.sh rename to archive/src/elixir-asdf/library_scripts.sh diff --git a/archive/erlang-asdf/README.md b/archive/src/erlang-asdf/README.md similarity index 100% rename from archive/erlang-asdf/README.md rename to archive/src/erlang-asdf/README.md diff --git a/archive/erlang-asdf/devcontainer-feature.json b/archive/src/erlang-asdf/devcontainer-feature.json similarity index 100% rename from archive/erlang-asdf/devcontainer-feature.json rename to archive/src/erlang-asdf/devcontainer-feature.json diff --git a/archive/erlang-asdf/install.sh b/archive/src/erlang-asdf/install.sh similarity index 100% rename from archive/erlang-asdf/install.sh rename to archive/src/erlang-asdf/install.sh diff --git a/archive/erlang-asdf/library_scripts.sh b/archive/src/erlang-asdf/library_scripts.sh similarity index 100% rename from archive/erlang-asdf/library_scripts.sh rename to archive/src/erlang-asdf/library_scripts.sh diff --git a/archive/ffmpeg-homebrew/README.md b/archive/src/ffmpeg-homebrew/README.md similarity index 100% rename from archive/ffmpeg-homebrew/README.md rename to archive/src/ffmpeg-homebrew/README.md diff --git a/archive/ffmpeg-homebrew/devcontainer-feature.json b/archive/src/ffmpeg-homebrew/devcontainer-feature.json similarity index 100% rename from archive/ffmpeg-homebrew/devcontainer-feature.json rename to archive/src/ffmpeg-homebrew/devcontainer-feature.json diff --git a/archive/ffmpeg-homebrew/install.sh b/archive/src/ffmpeg-homebrew/install.sh similarity index 100% rename from archive/ffmpeg-homebrew/install.sh rename to archive/src/ffmpeg-homebrew/install.sh diff --git a/archive/ffmpeg-homebrew/library_scripts.sh b/archive/src/ffmpeg-homebrew/library_scripts.sh similarity index 100% rename from 
archive/ffmpeg-homebrew/library_scripts.sh rename to archive/src/ffmpeg-homebrew/library_scripts.sh diff --git a/archive/gh-release/README.md b/archive/src/gh-release/README.md similarity index 100% rename from archive/gh-release/README.md rename to archive/src/gh-release/README.md diff --git a/archive/gh-release/devcontainer-feature.json b/archive/src/gh-release/devcontainer-feature.json similarity index 100% rename from archive/gh-release/devcontainer-feature.json rename to archive/src/gh-release/devcontainer-feature.json diff --git a/archive/gh-release/install.sh b/archive/src/gh-release/install.sh similarity index 100% rename from archive/gh-release/install.sh rename to archive/src/gh-release/install.sh diff --git a/archive/gh-release/library_scripts.sh b/archive/src/gh-release/library_scripts.sh similarity index 100% rename from archive/gh-release/library_scripts.sh rename to archive/src/gh-release/library_scripts.sh diff --git a/archive/homebrew-package/README.md b/archive/src/homebrew-package/README.md similarity index 100% rename from archive/homebrew-package/README.md rename to archive/src/homebrew-package/README.md diff --git a/archive/homebrew-package/devcontainer-feature.json b/archive/src/homebrew-package/devcontainer-feature.json similarity index 100% rename from archive/homebrew-package/devcontainer-feature.json rename to archive/src/homebrew-package/devcontainer-feature.json diff --git a/archive/homebrew-package/install.sh b/archive/src/homebrew-package/install.sh similarity index 100% rename from archive/homebrew-package/install.sh rename to archive/src/homebrew-package/install.sh diff --git a/archive/homebrew-package/library_scripts.sh b/archive/src/homebrew-package/library_scripts.sh similarity index 100% rename from archive/homebrew-package/library_scripts.sh rename to archive/src/homebrew-package/library_scripts.sh diff --git a/archive/ko/README.md b/archive/src/ko/README.md similarity index 100% rename from archive/ko/README.md rename to archive/src/ko/README.md diff --git a/archive/ko/devcontainer-feature.json b/archive/src/ko/devcontainer-feature.json similarity index 100% rename from archive/ko/devcontainer-feature.json rename to archive/src/ko/devcontainer-feature.json diff --git a/archive/ko/install.sh b/archive/src/ko/install.sh similarity index 100% rename from archive/ko/install.sh rename to archive/src/ko/install.sh diff --git a/archive/ko/library_scripts.sh b/archive/src/ko/library_scripts.sh similarity index 100% rename from archive/ko/library_scripts.sh rename to archive/src/ko/library_scripts.sh diff --git a/archive/kubescape/README.md b/archive/src/kubescape/README.md similarity index 100% rename from archive/kubescape/README.md rename to archive/src/kubescape/README.md diff --git a/archive/kubescape/devcontainer-feature.json b/archive/src/kubescape/devcontainer-feature.json similarity index 100% rename from archive/kubescape/devcontainer-feature.json rename to archive/src/kubescape/devcontainer-feature.json diff --git a/archive/kubescape/install.sh b/archive/src/kubescape/install.sh similarity index 100% rename from archive/kubescape/install.sh rename to archive/src/kubescape/install.sh diff --git a/archive/kubescape/library_scripts.sh b/archive/src/kubescape/library_scripts.sh similarity index 100% rename from archive/kubescape/library_scripts.sh rename to archive/src/kubescape/library_scripts.sh diff --git a/archive/mage/README.md b/archive/src/mage/README.md similarity index 100% rename from archive/mage/README.md rename to archive/src/mage/README.md 
diff --git a/archive/mage/devcontainer-feature.json b/archive/src/mage/devcontainer-feature.json similarity index 100% rename from archive/mage/devcontainer-feature.json rename to archive/src/mage/devcontainer-feature.json diff --git a/archive/mage/install.sh b/archive/src/mage/install.sh similarity index 100% rename from archive/mage/install.sh rename to archive/src/mage/install.sh diff --git a/archive/mage/library_scripts.sh b/archive/src/mage/library_scripts.sh similarity index 100% rename from archive/mage/library_scripts.sh rename to archive/src/mage/library_scripts.sh diff --git a/archive/meson-asdf/README.md b/archive/src/meson-asdf/README.md similarity index 100% rename from archive/meson-asdf/README.md rename to archive/src/meson-asdf/README.md diff --git a/archive/meson-asdf/devcontainer-feature.json b/archive/src/meson-asdf/devcontainer-feature.json similarity index 100% rename from archive/meson-asdf/devcontainer-feature.json rename to archive/src/meson-asdf/devcontainer-feature.json diff --git a/archive/meson-asdf/install.sh b/archive/src/meson-asdf/install.sh similarity index 100% rename from archive/meson-asdf/install.sh rename to archive/src/meson-asdf/install.sh diff --git a/archive/meson-asdf/library_scripts.sh b/archive/src/meson-asdf/library_scripts.sh similarity index 100% rename from archive/meson-asdf/library_scripts.sh rename to archive/src/meson-asdf/library_scripts.sh diff --git a/archive/meteor-cli/README.md b/archive/src/meteor-cli/README.md similarity index 100% rename from archive/meteor-cli/README.md rename to archive/src/meteor-cli/README.md diff --git a/archive/meteor-cli/devcontainer-feature.json b/archive/src/meteor-cli/devcontainer-feature.json similarity index 100% rename from archive/meteor-cli/devcontainer-feature.json rename to archive/src/meteor-cli/devcontainer-feature.json diff --git a/archive/meteor-cli/install.sh b/archive/src/meteor-cli/install.sh similarity index 100% rename from archive/meteor-cli/install.sh rename to archive/src/meteor-cli/install.sh diff --git a/archive/meteor-cli/library_scripts.sh b/archive/src/meteor-cli/library_scripts.sh similarity index 100% rename from archive/meteor-cli/library_scripts.sh rename to archive/src/meteor-cli/library_scripts.sh diff --git a/archive/nushell/README.md b/archive/src/nushell/README.md similarity index 100% rename from archive/nushell/README.md rename to archive/src/nushell/README.md diff --git a/archive/nushell/devcontainer-feature.json b/archive/src/nushell/devcontainer-feature.json similarity index 100% rename from archive/nushell/devcontainer-feature.json rename to archive/src/nushell/devcontainer-feature.json diff --git a/archive/nushell/install.sh b/archive/src/nushell/install.sh similarity index 100% rename from archive/nushell/install.sh rename to archive/src/nushell/install.sh diff --git a/archive/nushell/library_scripts.sh b/archive/src/nushell/library_scripts.sh similarity index 100% rename from archive/nushell/library_scripts.sh rename to archive/src/nushell/library_scripts.sh diff --git a/archive/ory-keto/README.md b/archive/src/ory-keto/README.md similarity index 100% rename from archive/ory-keto/README.md rename to archive/src/ory-keto/README.md diff --git a/archive/ory-keto/devcontainer-feature.json b/archive/src/ory-keto/devcontainer-feature.json similarity index 100% rename from archive/ory-keto/devcontainer-feature.json rename to archive/src/ory-keto/devcontainer-feature.json diff --git a/archive/ory-keto/install.sh b/archive/src/ory-keto/install.sh similarity index 100% 
rename from archive/ory-keto/install.sh rename to archive/src/ory-keto/install.sh diff --git a/archive/ory-keto/library_scripts.sh b/archive/src/ory-keto/library_scripts.sh similarity index 100% rename from archive/ory-keto/library_scripts.sh rename to archive/src/ory-keto/library_scripts.sh diff --git a/archive/pip-audit/README.md b/archive/src/pip-audit/README.md similarity index 100% rename from archive/pip-audit/README.md rename to archive/src/pip-audit/README.md diff --git a/archive/pip-audit/devcontainer-feature.json b/archive/src/pip-audit/devcontainer-feature.json similarity index 100% rename from archive/pip-audit/devcontainer-feature.json rename to archive/src/pip-audit/devcontainer-feature.json diff --git a/archive/pip-audit/install.sh b/archive/src/pip-audit/install.sh similarity index 100% rename from archive/pip-audit/install.sh rename to archive/src/pip-audit/install.sh diff --git a/archive/pip-audit/library_scripts.sh b/archive/src/pip-audit/library_scripts.sh similarity index 100% rename from archive/pip-audit/library_scripts.sh rename to archive/src/pip-audit/library_scripts.sh diff --git a/archive/porter/README.md b/archive/src/porter/README.md similarity index 100% rename from archive/porter/README.md rename to archive/src/porter/README.md diff --git a/archive/porter/devcontainer-feature.json b/archive/src/porter/devcontainer-feature.json similarity index 100% rename from archive/porter/devcontainer-feature.json rename to archive/src/porter/devcontainer-feature.json diff --git a/archive/porter/install.sh b/archive/src/porter/install.sh similarity index 100% rename from archive/porter/install.sh rename to archive/src/porter/install.sh diff --git a/archive/porter/library_scripts.sh b/archive/src/porter/library_scripts.sh similarity index 100% rename from archive/porter/library_scripts.sh rename to archive/src/porter/library_scripts.sh diff --git a/archive/postgres-asdf/README.md b/archive/src/postgres-asdf/README.md similarity index 100% rename from archive/postgres-asdf/README.md rename to archive/src/postgres-asdf/README.md diff --git a/archive/postgres-asdf/devcontainer-feature.json b/archive/src/postgres-asdf/devcontainer-feature.json similarity index 100% rename from archive/postgres-asdf/devcontainer-feature.json rename to archive/src/postgres-asdf/devcontainer-feature.json diff --git a/archive/postgres-asdf/install.sh b/archive/src/postgres-asdf/install.sh similarity index 100% rename from archive/postgres-asdf/install.sh rename to archive/src/postgres-asdf/install.sh diff --git a/archive/postgres-asdf/library_scripts.sh b/archive/src/postgres-asdf/library_scripts.sh similarity index 100% rename from archive/postgres-asdf/library_scripts.sh rename to archive/src/postgres-asdf/library_scripts.sh diff --git a/archive/rabbitmq-asdf/README.md b/archive/src/rabbitmq-asdf/README.md similarity index 100% rename from archive/rabbitmq-asdf/README.md rename to archive/src/rabbitmq-asdf/README.md diff --git a/archive/rabbitmq-asdf/devcontainer-feature.json b/archive/src/rabbitmq-asdf/devcontainer-feature.json similarity index 100% rename from archive/rabbitmq-asdf/devcontainer-feature.json rename to archive/src/rabbitmq-asdf/devcontainer-feature.json diff --git a/archive/rabbitmq-asdf/install.sh b/archive/src/rabbitmq-asdf/install.sh similarity index 100% rename from archive/rabbitmq-asdf/install.sh rename to archive/src/rabbitmq-asdf/install.sh diff --git a/archive/rabbitmq-asdf/library_scripts.sh b/archive/src/rabbitmq-asdf/library_scripts.sh similarity index 100% rename 
from archive/rabbitmq-asdf/library_scripts.sh rename to archive/src/rabbitmq-asdf/library_scripts.sh diff --git a/archive/scala-asdf/README.md b/archive/src/scala-asdf/README.md similarity index 100% rename from archive/scala-asdf/README.md rename to archive/src/scala-asdf/README.md diff --git a/archive/scala-asdf/devcontainer-feature.json b/archive/src/scala-asdf/devcontainer-feature.json similarity index 100% rename from archive/scala-asdf/devcontainer-feature.json rename to archive/src/scala-asdf/devcontainer-feature.json diff --git a/archive/scala-asdf/install.sh b/archive/src/scala-asdf/install.sh similarity index 100% rename from archive/scala-asdf/install.sh rename to archive/src/scala-asdf/install.sh diff --git a/archive/scala-asdf/library_scripts.sh b/archive/src/scala-asdf/library_scripts.sh similarity index 100% rename from archive/scala-asdf/library_scripts.sh rename to archive/src/scala-asdf/library_scripts.sh diff --git a/archive/serverless/README.md b/archive/src/serverless/README.md similarity index 100% rename from archive/serverless/README.md rename to archive/src/serverless/README.md diff --git a/archive/serverless/devcontainer-feature.json b/archive/src/serverless/devcontainer-feature.json similarity index 100% rename from archive/serverless/devcontainer-feature.json rename to archive/src/serverless/devcontainer-feature.json diff --git a/archive/serverless/install.sh b/archive/src/serverless/install.sh similarity index 100% rename from archive/serverless/install.sh rename to archive/src/serverless/install.sh diff --git a/archive/serverless/library_scripts.sh b/archive/src/serverless/library_scripts.sh similarity index 100% rename from archive/serverless/library_scripts.sh rename to archive/src/serverless/library_scripts.sh diff --git a/archive/sqlfmt/README.md b/archive/src/sqlfmt/README.md similarity index 100% rename from archive/sqlfmt/README.md rename to archive/src/sqlfmt/README.md diff --git a/archive/sqlfmt/devcontainer-feature.json b/archive/src/sqlfmt/devcontainer-feature.json similarity index 100% rename from archive/sqlfmt/devcontainer-feature.json rename to archive/src/sqlfmt/devcontainer-feature.json diff --git a/archive/sqlfmt/install.sh b/archive/src/sqlfmt/install.sh similarity index 100% rename from archive/sqlfmt/install.sh rename to archive/src/sqlfmt/install.sh diff --git a/archive/sqlfmt/library_scripts.sh b/archive/src/sqlfmt/library_scripts.sh similarity index 100% rename from archive/sqlfmt/library_scripts.sh rename to archive/src/sqlfmt/library_scripts.sh diff --git a/archive/syncthing/README.md b/archive/src/syncthing/README.md similarity index 100% rename from archive/syncthing/README.md rename to archive/src/syncthing/README.md diff --git a/archive/syncthing/devcontainer-feature.json b/archive/src/syncthing/devcontainer-feature.json similarity index 100% rename from archive/syncthing/devcontainer-feature.json rename to archive/src/syncthing/devcontainer-feature.json diff --git a/archive/syncthing/install.sh b/archive/src/syncthing/install.sh similarity index 100% rename from archive/syncthing/install.sh rename to archive/src/syncthing/install.sh diff --git a/archive/syncthing/library_scripts.sh b/archive/src/syncthing/library_scripts.sh similarity index 100% rename from archive/syncthing/library_scripts.sh rename to archive/src/syncthing/library_scripts.sh diff --git a/archive/trivy/README.md b/archive/src/trivy/README.md similarity index 100% rename from archive/trivy/README.md rename to archive/src/trivy/README.md diff --git 
a/archive/trivy/devcontainer-feature.json b/archive/src/trivy/devcontainer-feature.json similarity index 100% rename from archive/trivy/devcontainer-feature.json rename to archive/src/trivy/devcontainer-feature.json diff --git a/archive/trivy/install.sh b/archive/src/trivy/install.sh similarity index 100% rename from archive/trivy/install.sh rename to archive/src/trivy/install.sh diff --git a/archive/trivy/library_scripts.sh b/archive/src/trivy/library_scripts.sh similarity index 100% rename from archive/trivy/library_scripts.sh rename to archive/src/trivy/library_scripts.sh diff --git a/archive/vertx-sdkman/README.md b/archive/src/vertx-sdkman/README.md similarity index 100% rename from archive/vertx-sdkman/README.md rename to archive/src/vertx-sdkman/README.md diff --git a/archive/vertx-sdkman/devcontainer-feature.json b/archive/src/vertx-sdkman/devcontainer-feature.json similarity index 100% rename from archive/vertx-sdkman/devcontainer-feature.json rename to archive/src/vertx-sdkman/devcontainer-feature.json diff --git a/archive/vertx-sdkman/install.sh b/archive/src/vertx-sdkman/install.sh similarity index 100% rename from archive/vertx-sdkman/install.sh rename to archive/src/vertx-sdkman/install.sh diff --git a/archive/vertx-sdkman/library_scripts.sh b/archive/src/vertx-sdkman/library_scripts.sh similarity index 100% rename from archive/vertx-sdkman/library_scripts.sh rename to archive/src/vertx-sdkman/library_scripts.sh diff --git a/archive/visualvm-sdkman/README.md b/archive/src/visualvm-sdkman/README.md similarity index 100% rename from archive/visualvm-sdkman/README.md rename to archive/src/visualvm-sdkman/README.md diff --git a/archive/visualvm-sdkman/devcontainer-feature.json b/archive/src/visualvm-sdkman/devcontainer-feature.json similarity index 100% rename from archive/visualvm-sdkman/devcontainer-feature.json rename to archive/src/visualvm-sdkman/devcontainer-feature.json diff --git a/archive/visualvm-sdkman/install.sh b/archive/src/visualvm-sdkman/install.sh similarity index 100% rename from archive/visualvm-sdkman/install.sh rename to archive/src/visualvm-sdkman/install.sh diff --git a/archive/visualvm-sdkman/library_scripts.sh b/archive/src/visualvm-sdkman/library_scripts.sh similarity index 100% rename from archive/visualvm-sdkman/library_scripts.sh rename to archive/src/visualvm-sdkman/library_scripts.sh diff --git a/archive/volta/README.md b/archive/src/volta/README.md similarity index 100% rename from archive/volta/README.md rename to archive/src/volta/README.md diff --git a/archive/volta/devcontainer-feature.json b/archive/src/volta/devcontainer-feature.json similarity index 100% rename from archive/volta/devcontainer-feature.json rename to archive/src/volta/devcontainer-feature.json diff --git a/archive/volta/install.sh b/archive/src/volta/install.sh similarity index 100% rename from archive/volta/install.sh rename to archive/src/volta/install.sh diff --git a/archive/volta/library_scripts.sh b/archive/src/volta/library_scripts.sh similarity index 100% rename from archive/volta/library_scripts.sh rename to archive/src/volta/library_scripts.sh diff --git a/archive/xplr/README.md b/archive/src/xplr/README.md similarity index 100% rename from archive/xplr/README.md rename to archive/src/xplr/README.md diff --git a/archive/xplr/devcontainer-feature.json b/archive/src/xplr/devcontainer-feature.json similarity index 100% rename from archive/xplr/devcontainer-feature.json rename to archive/src/xplr/devcontainer-feature.json diff --git a/archive/xplr/install.sh 
b/archive/src/xplr/install.sh similarity index 100% rename from archive/xplr/install.sh rename to archive/src/xplr/install.sh diff --git a/archive/xplr/library_scripts.sh b/archive/src/xplr/library_scripts.sh similarity index 100% rename from archive/xplr/library_scripts.sh rename to archive/src/xplr/library_scripts.sh diff --git a/test/actions-runner-noexternals/scenarios.json b/archive/test/actions-runner-noexternals/scenarios.json similarity index 100% rename from test/actions-runner-noexternals/scenarios.json rename to archive/test/actions-runner-noexternals/scenarios.json diff --git a/test/actions-runner-noexternals/test_defaults_debian.sh b/archive/test/actions-runner-noexternals/test_defaults_debian.sh similarity index 100% rename from test/actions-runner-noexternals/test_defaults_debian.sh rename to archive/test/actions-runner-noexternals/test_defaults_debian.sh diff --git a/test/actions-runner-noruntime-noexternals/scenarios.json b/archive/test/actions-runner-noruntime-noexternals/scenarios.json similarity index 100% rename from test/actions-runner-noruntime-noexternals/scenarios.json rename to archive/test/actions-runner-noruntime-noexternals/scenarios.json diff --git a/test/actions-runner-noruntime-noexternals/test_defaults_debian.sh b/archive/test/actions-runner-noruntime-noexternals/test_defaults_debian.sh similarity index 100% rename from test/actions-runner-noruntime-noexternals/test_defaults_debian.sh rename to archive/test/actions-runner-noruntime-noexternals/test_defaults_debian.sh diff --git a/test/actions-runner-noruntime/scenarios.json b/archive/test/actions-runner-noruntime/scenarios.json similarity index 100% rename from test/actions-runner-noruntime/scenarios.json rename to archive/test/actions-runner-noruntime/scenarios.json diff --git a/test/actions-runner-noruntime/test_defaults_debian.sh b/archive/test/actions-runner-noruntime/test_defaults_debian.sh similarity index 100% rename from test/actions-runner-noruntime/test_defaults_debian.sh rename to archive/test/actions-runner-noruntime/test_defaults_debian.sh diff --git a/test/age-keygen/scenarios.json b/archive/test/age-keygen/scenarios.json similarity index 100% rename from test/age-keygen/scenarios.json rename to archive/test/age-keygen/scenarios.json diff --git a/test/age-keygen/test_defaults_debian.sh b/archive/test/age-keygen/test_defaults_debian.sh similarity index 100% rename from test/age-keygen/test_defaults_debian.sh rename to archive/test/age-keygen/test_defaults_debian.sh diff --git a/test/age/scenarios.json b/archive/test/age/scenarios.json similarity index 100% rename from test/age/scenarios.json rename to archive/test/age/scenarios.json diff --git a/test/age/test_defaults_debian.sh b/archive/test/age/test_defaults_debian.sh similarity index 100% rename from test/age/test_defaults_debian.sh rename to archive/test/age/test_defaults_debian.sh diff --git a/test/airplane-cli/scenarios.json b/archive/test/airplane-cli/scenarios.json similarity index 100% rename from test/airplane-cli/scenarios.json rename to archive/test/airplane-cli/scenarios.json diff --git a/test/airplane-cli/test_defaults_debian.sh b/archive/test/airplane-cli/test_defaults_debian.sh similarity index 100% rename from test/airplane-cli/test_defaults_debian.sh rename to archive/test/airplane-cli/test_defaults_debian.sh diff --git a/test/bitwarden-cli/scenarios.json b/archive/test/bitwarden-cli/scenarios.json similarity index 100% rename from test/bitwarden-cli/scenarios.json rename to archive/test/bitwarden-cli/scenarios.json diff --git 
a/test/bitwarden-cli/test_defaults_debian.sh b/archive/test/bitwarden-cli/test_defaults_debian.sh similarity index 100% rename from test/bitwarden-cli/test_defaults_debian.sh rename to archive/test/bitwarden-cli/test_defaults_debian.sh diff --git a/test/boundary-asdf/scenarios.json b/archive/test/boundary-asdf/scenarios.json similarity index 100% rename from test/boundary-asdf/scenarios.json rename to archive/test/boundary-asdf/scenarios.json diff --git a/test/boundary-asdf/test.sh b/archive/test/boundary-asdf/test.sh similarity index 100% rename from test/boundary-asdf/test.sh rename to archive/test/boundary-asdf/test.sh diff --git a/test/btm/scenarios.json b/archive/test/btm/scenarios.json similarity index 100% rename from test/btm/scenarios.json rename to archive/test/btm/scenarios.json diff --git a/test/btm/test_defaults_debian.sh b/archive/test/btm/test_defaults_debian.sh similarity index 100% rename from test/btm/test_defaults_debian.sh rename to archive/test/btm/test_defaults_debian.sh diff --git a/test/cert-manager/scenarios.json b/archive/test/cert-manager/scenarios.json similarity index 100% rename from test/cert-manager/scenarios.json rename to archive/test/cert-manager/scenarios.json diff --git a/test/cert-manager/test_defaults_debian.sh b/archive/test/cert-manager/test_defaults_debian.sh similarity index 100% rename from test/cert-manager/test_defaults_debian.sh rename to archive/test/cert-manager/test_defaults_debian.sh diff --git a/test/cmctl-asdf/scenarios.json b/archive/test/cmctl-asdf/scenarios.json similarity index 100% rename from test/cmctl-asdf/scenarios.json rename to archive/test/cmctl-asdf/scenarios.json diff --git a/test/cmctl-asdf/test.sh b/archive/test/cmctl-asdf/test.sh similarity index 100% rename from test/cmctl-asdf/test.sh rename to archive/test/cmctl-asdf/test.sh diff --git a/test/codenotary-cas/scenarios.json b/archive/test/codenotary-cas/scenarios.json similarity index 100% rename from test/codenotary-cas/scenarios.json rename to archive/test/codenotary-cas/scenarios.json diff --git a/test/codenotary-cas/test_defaults_debian.sh b/archive/test/codenotary-cas/test_defaults_debian.sh similarity index 100% rename from test/codenotary-cas/test_defaults_debian.sh rename to archive/test/codenotary-cas/test_defaults_debian.sh diff --git a/test/croc/scenarios.json b/archive/test/croc/scenarios.json similarity index 100% rename from test/croc/scenarios.json rename to archive/test/croc/scenarios.json diff --git a/test/croc/test_defaults_debian.sh b/archive/test/croc/test_defaults_debian.sh similarity index 100% rename from test/croc/test_defaults_debian.sh rename to archive/test/croc/test_defaults_debian.sh diff --git a/test/cue-asdf/scenarios.json b/archive/test/cue-asdf/scenarios.json similarity index 100% rename from test/cue-asdf/scenarios.json rename to archive/test/cue-asdf/scenarios.json diff --git a/test/cue-asdf/test.sh b/archive/test/cue-asdf/test.sh similarity index 100% rename from test/cue-asdf/test.sh rename to archive/test/cue-asdf/test.sh diff --git a/test/edge-impulse-cli/scenarios.json b/archive/test/edge-impulse-cli/scenarios.json similarity index 100% rename from test/edge-impulse-cli/scenarios.json rename to archive/test/edge-impulse-cli/scenarios.json diff --git a/test/edge-impulse-cli/test.sh b/archive/test/edge-impulse-cli/test.sh similarity index 100% rename from test/edge-impulse-cli/test.sh rename to archive/test/edge-impulse-cli/test.sh diff --git a/test/elixir-asdf/scenarios.json b/archive/test/elixir-asdf/scenarios.json similarity 
index 100% rename from test/elixir-asdf/scenarios.json rename to archive/test/elixir-asdf/scenarios.json diff --git a/test/elixir-asdf/test_debian_11.sh b/archive/test/elixir-asdf/test_debian_11.sh similarity index 100% rename from test/elixir-asdf/test_debian_11.sh rename to archive/test/elixir-asdf/test_debian_11.sh diff --git a/test/elixir-asdf/test_debian_12.sh b/archive/test/elixir-asdf/test_debian_12.sh similarity index 100% rename from test/elixir-asdf/test_debian_12.sh rename to archive/test/elixir-asdf/test_debian_12.sh diff --git a/test/elixir-asdf/test_ubuntu_2004.sh b/archive/test/elixir-asdf/test_ubuntu_2004.sh similarity index 100% rename from test/elixir-asdf/test_ubuntu_2004.sh rename to archive/test/elixir-asdf/test_ubuntu_2004.sh diff --git a/test/elixir-asdf/test_ubuntu_2204.sh b/archive/test/elixir-asdf/test_ubuntu_2204.sh similarity index 100% rename from test/elixir-asdf/test_ubuntu_2204.sh rename to archive/test/elixir-asdf/test_ubuntu_2204.sh diff --git a/test/erlang-asdf/scenarios.json b/archive/test/erlang-asdf/scenarios.json similarity index 100% rename from test/erlang-asdf/scenarios.json rename to archive/test/erlang-asdf/scenarios.json diff --git a/test/erlang-asdf/test.sh b/archive/test/erlang-asdf/test.sh similarity index 100% rename from test/erlang-asdf/test.sh rename to archive/test/erlang-asdf/test.sh diff --git a/test/erlang-asdf/test_debian_bullseye.sh b/archive/test/erlang-asdf/test_debian_bullseye.sh similarity index 100% rename from test/erlang-asdf/test_debian_bullseye.sh rename to archive/test/erlang-asdf/test_debian_bullseye.sh diff --git a/test/erlang-asdf/test_ubuntu.sh b/archive/test/erlang-asdf/test_ubuntu.sh similarity index 100% rename from test/erlang-asdf/test_ubuntu.sh rename to archive/test/erlang-asdf/test_ubuntu.sh diff --git a/test/ffmpeg-homebrew/scenarios.json b/archive/test/ffmpeg-homebrew/scenarios.json similarity index 100% rename from test/ffmpeg-homebrew/scenarios.json rename to archive/test/ffmpeg-homebrew/scenarios.json diff --git a/test/ffmpeg-homebrew/test.sh b/archive/test/ffmpeg-homebrew/test.sh similarity index 100% rename from test/ffmpeg-homebrew/test.sh rename to archive/test/ffmpeg-homebrew/test.sh diff --git a/test/gh-release/scenarios.json b/archive/test/gh-release/scenarios.json similarity index 100% rename from test/gh-release/scenarios.json rename to archive/test/gh-release/scenarios.json diff --git a/test/gh-release/test_act.sh b/archive/test/gh-release/test_act.sh similarity index 100% rename from test/gh-release/test_act.sh rename to archive/test/gh-release/test_act.sh diff --git a/test/gh-release/test_apiops.sh b/archive/test/gh-release/test_apiops.sh similarity index 100% rename from test/gh-release/test_apiops.sh rename to archive/test/gh-release/test_apiops.sh diff --git a/test/gh-release/test_defaults_alpine.sh b/archive/test/gh-release/test_defaults_alpine.sh similarity index 100% rename from test/gh-release/test_defaults_alpine.sh rename to archive/test/gh-release/test_defaults_alpine.sh diff --git a/test/gh-release/test_defaults_debian.sh b/archive/test/gh-release/test_defaults_debian.sh similarity index 100% rename from test/gh-release/test_defaults_debian.sh rename to archive/test/gh-release/test_defaults_debian.sh diff --git a/test/gh-release/test_etcd.sh b/archive/test/gh-release/test_etcd.sh similarity index 100% rename from test/gh-release/test_etcd.sh rename to archive/test/gh-release/test_etcd.sh diff --git a/test/gh-release/test_no_git.sh b/archive/test/gh-release/test_no_git.sh similarity 
index 100% rename from test/gh-release/test_no_git.sh rename to archive/test/gh-release/test_no_git.sh diff --git a/test/gh-release/test_powershell.sh b/archive/test/gh-release/test_powershell.sh similarity index 100% rename from test/gh-release/test_powershell.sh rename to archive/test/gh-release/test_powershell.sh diff --git a/test/homebrew-package/scenarios.json b/archive/test/homebrew-package/scenarios.json similarity index 100% rename from test/homebrew-package/scenarios.json rename to archive/test/homebrew-package/scenarios.json diff --git a/test/homebrew-package/test_file_limit.sh b/archive/test/homebrew-package/test_file_limit.sh similarity index 100% rename from test/homebrew-package/test_file_limit.sh rename to archive/test/homebrew-package/test_file_limit.sh diff --git a/test/homebrew-package/test_git_based_version.sh b/archive/test/homebrew-package/test_git_based_version.sh similarity index 100% rename from test/homebrew-package/test_git_based_version.sh rename to archive/test/homebrew-package/test_git_based_version.sh diff --git a/test/homebrew-package/test_latest.sh b/archive/test/homebrew-package/test_latest.sh similarity index 100% rename from test/homebrew-package/test_latest.sh rename to archive/test/homebrew-package/test_latest.sh diff --git a/test/homebrew-package/test_specific_version.sh b/archive/test/homebrew-package/test_specific_version.sh similarity index 100% rename from test/homebrew-package/test_specific_version.sh rename to archive/test/homebrew-package/test_specific_version.sh diff --git a/test/homebrew-package/test_universal.sh b/archive/test/homebrew-package/test_universal.sh similarity index 100% rename from test/homebrew-package/test_universal.sh rename to archive/test/homebrew-package/test_universal.sh diff --git a/test/ko/scenarios.json b/archive/test/ko/scenarios.json similarity index 100% rename from test/ko/scenarios.json rename to archive/test/ko/scenarios.json diff --git a/test/ko/test_defaults_debian.sh b/archive/test/ko/test_defaults_debian.sh similarity index 100% rename from test/ko/test_defaults_debian.sh rename to archive/test/ko/test_defaults_debian.sh diff --git a/test/kubescape/scenarios.json b/archive/test/kubescape/scenarios.json similarity index 100% rename from test/kubescape/scenarios.json rename to archive/test/kubescape/scenarios.json diff --git a/test/kubescape/test_defaults_debian.sh b/archive/test/kubescape/test_defaults_debian.sh similarity index 100% rename from test/kubescape/test_defaults_debian.sh rename to archive/test/kubescape/test_defaults_debian.sh diff --git a/test/mage/scenarios.json b/archive/test/mage/scenarios.json similarity index 100% rename from test/mage/scenarios.json rename to archive/test/mage/scenarios.json diff --git a/test/mage/test_defaults_debian.sh b/archive/test/mage/test_defaults_debian.sh similarity index 100% rename from test/mage/test_defaults_debian.sh rename to archive/test/mage/test_defaults_debian.sh diff --git a/test/meson-asdf/scenarios.json b/archive/test/meson-asdf/scenarios.json similarity index 100% rename from test/meson-asdf/scenarios.json rename to archive/test/meson-asdf/scenarios.json diff --git a/test/meson-asdf/test.sh b/archive/test/meson-asdf/test.sh similarity index 100% rename from test/meson-asdf/test.sh rename to archive/test/meson-asdf/test.sh diff --git a/test/meteor-cli/scenarios.json b/archive/test/meteor-cli/scenarios.json similarity index 100% rename from test/meteor-cli/scenarios.json rename to archive/test/meteor-cli/scenarios.json diff --git 
a/test/meteor-cli/test.sh b/archive/test/meteor-cli/test.sh similarity index 100% rename from test/meteor-cli/test.sh rename to archive/test/meteor-cli/test.sh diff --git a/test/nushell/scenarios.json b/archive/test/nushell/scenarios.json similarity index 100% rename from test/nushell/scenarios.json rename to archive/test/nushell/scenarios.json diff --git a/test/nushell/test_defaults_debian.sh b/archive/test/nushell/test_defaults_debian.sh similarity index 100% rename from test/nushell/test_defaults_debian.sh rename to archive/test/nushell/test_defaults_debian.sh diff --git a/test/ory-keto/scenarios.json b/archive/test/ory-keto/scenarios.json similarity index 100% rename from test/ory-keto/scenarios.json rename to archive/test/ory-keto/scenarios.json diff --git a/test/ory-keto/test_defaults_debian.sh b/archive/test/ory-keto/test_defaults_debian.sh similarity index 100% rename from test/ory-keto/test_defaults_debian.sh rename to archive/test/ory-keto/test_defaults_debian.sh diff --git a/test/pip-audit/scenarios.json b/archive/test/pip-audit/scenarios.json similarity index 100% rename from test/pip-audit/scenarios.json rename to archive/test/pip-audit/scenarios.json diff --git a/test/pip-audit/test_defaults.sh b/archive/test/pip-audit/test_defaults.sh similarity index 100% rename from test/pip-audit/test_defaults.sh rename to archive/test/pip-audit/test_defaults.sh diff --git a/test/porter/install_mixins.sh b/archive/test/porter/install_mixins.sh similarity index 100% rename from test/porter/install_mixins.sh rename to archive/test/porter/install_mixins.sh diff --git a/test/porter/scenarios.json b/archive/test/porter/scenarios.json similarity index 100% rename from test/porter/scenarios.json rename to archive/test/porter/scenarios.json diff --git a/test/porter/test.sh b/archive/test/porter/test.sh similarity index 100% rename from test/porter/test.sh rename to archive/test/porter/test.sh diff --git a/test/postgres-asdf/scenarios.json b/archive/test/postgres-asdf/scenarios.json similarity index 100% rename from test/postgres-asdf/scenarios.json rename to archive/test/postgres-asdf/scenarios.json diff --git a/test/postgres-asdf/test.sh b/archive/test/postgres-asdf/test.sh similarity index 100% rename from test/postgres-asdf/test.sh rename to archive/test/postgres-asdf/test.sh diff --git a/test/rabbitmq-asdf/scenarios.json b/archive/test/rabbitmq-asdf/scenarios.json similarity index 100% rename from test/rabbitmq-asdf/scenarios.json rename to archive/test/rabbitmq-asdf/scenarios.json diff --git a/test/rabbitmq-asdf/test.sh b/archive/test/rabbitmq-asdf/test.sh similarity index 100% rename from test/rabbitmq-asdf/test.sh rename to archive/test/rabbitmq-asdf/test.sh diff --git a/test/scala-asdf/scenarios.json b/archive/test/scala-asdf/scenarios.json similarity index 100% rename from test/scala-asdf/scenarios.json rename to archive/test/scala-asdf/scenarios.json diff --git a/test/scala-asdf/test.sh b/archive/test/scala-asdf/test.sh similarity index 100% rename from test/scala-asdf/test.sh rename to archive/test/scala-asdf/test.sh diff --git a/test/serverless/scenarios.json b/archive/test/serverless/scenarios.json similarity index 100% rename from test/serverless/scenarios.json rename to archive/test/serverless/scenarios.json diff --git a/test/serverless/test.sh b/archive/test/serverless/test.sh similarity index 100% rename from test/serverless/test.sh rename to archive/test/serverless/test.sh diff --git a/test/sqlfmt/scenarios.json b/archive/test/sqlfmt/scenarios.json similarity index 100% rename 
from test/sqlfmt/scenarios.json rename to archive/test/sqlfmt/scenarios.json diff --git a/test/sqlfmt/test.sh b/archive/test/sqlfmt/test.sh similarity index 100% rename from test/sqlfmt/test.sh rename to archive/test/sqlfmt/test.sh diff --git a/test/syncthing/scenarios.json b/archive/test/syncthing/scenarios.json similarity index 100% rename from test/syncthing/scenarios.json rename to archive/test/syncthing/scenarios.json diff --git a/test/syncthing/test_defaults_debian.sh b/archive/test/syncthing/test_defaults_debian.sh similarity index 100% rename from test/syncthing/test_defaults_debian.sh rename to archive/test/syncthing/test_defaults_debian.sh diff --git a/test/trivy/scenarios.json b/archive/test/trivy/scenarios.json similarity index 100% rename from test/trivy/scenarios.json rename to archive/test/trivy/scenarios.json diff --git a/test/trivy/test_defaults_debian.sh b/archive/test/trivy/test_defaults_debian.sh similarity index 100% rename from test/trivy/test_defaults_debian.sh rename to archive/test/trivy/test_defaults_debian.sh diff --git a/test/vertx-sdkman/scenarios.json b/archive/test/vertx-sdkman/scenarios.json similarity index 100% rename from test/vertx-sdkman/scenarios.json rename to archive/test/vertx-sdkman/scenarios.json diff --git a/test/vertx-sdkman/test.sh b/archive/test/vertx-sdkman/test.sh similarity index 100% rename from test/vertx-sdkman/test.sh rename to archive/test/vertx-sdkman/test.sh diff --git a/test/visualvm-sdkman/scenarios.json b/archive/test/visualvm-sdkman/scenarios.json similarity index 100% rename from test/visualvm-sdkman/scenarios.json rename to archive/test/visualvm-sdkman/scenarios.json diff --git a/test/visualvm-sdkman/test.sh b/archive/test/visualvm-sdkman/test.sh similarity index 100% rename from test/visualvm-sdkman/test.sh rename to archive/test/visualvm-sdkman/test.sh diff --git a/test/volta/scenarios.json b/archive/test/volta/scenarios.json similarity index 100% rename from test/volta/scenarios.json rename to archive/test/volta/scenarios.json diff --git a/test/volta/test_defaults_debian.sh b/archive/test/volta/test_defaults_debian.sh similarity index 100% rename from test/volta/test_defaults_debian.sh rename to archive/test/volta/test_defaults_debian.sh diff --git a/test/xplr/scenarios.json b/archive/test/xplr/scenarios.json similarity index 100% rename from test/xplr/scenarios.json rename to archive/test/xplr/scenarios.json diff --git a/test/xplr/test_defaults_debian.sh b/archive/test/xplr/test_defaults_debian.sh similarity index 100% rename from test/xplr/test_defaults_debian.sh rename to archive/test/xplr/test_defaults_debian.sh

From 679812d1c83c4c9755577ac586420bbbdde007d0 Mon Sep 17 00:00:00 2001
From: koralowiec <36413794+koralowiec@users.noreply.github.com>
Date: Sat, 28 Sep 2024 08:03:03 +0000
Subject: [PATCH 18/38] chore(ansible): fix test scenario script

---
 test/ansible/test_version_selection.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/ansible/test_version_selection.sh b/test/ansible/test_version_selection.sh
index 937f01030..b51f06dc1 100755
--- a/test/ansible/test_version_selection.sh
+++ b/test/ansible/test_version_selection.sh
@@ -4,6 +4,6 @@ set -e
 
 source dev-container-features-test-lib
 
-check "ansible --version | grep 'core 2.13.0'" ansible --version | grep 'core 2.13.0'
+check "ansible version is equal to 2.13.0" bash -c "ansible --version | grep 'core 2.13.0'"
 
 reportResults

From 64c54117a168d9240b374c47c860b5977cb8bc3c Mon Sep 17 00:00:00 2001
From: koralowiec <36413794+koralowiec@users.noreply.github.com>
Date: Sat, 28 Sep 2024 14:47:33 +0000
Subject: [PATCH 19/38] fix(pipx-package): don't alter main package version by
 using requirements.txt file

---
 src/pipx-package/devcontainer-feature.json |  2 +-
 src/pipx-package/install.sh                | 19 +++++++++++--------
 .../install_ansible_specific_version.sh    | 11 +++++++++++
 test/pipx-package/scenarios.json           | 14 +++++++++++++-
 4 files changed, 36 insertions(+), 10 deletions(-)
 create mode 100755 test/pipx-package/install_ansible_specific_version.sh

diff --git a/src/pipx-package/devcontainer-feature.json b/src/pipx-package/devcontainer-feature.json
index c8e1d7a8e..ae01f9ec3 100644
--- a/src/pipx-package/devcontainer-feature.json
+++ b/src/pipx-package/devcontainer-feature.json
@@ -1,7 +1,7 @@
 {
     "name": "Pipx package",
     "id": "pipx-package",
-    "version": "1.1.8",
+    "version": "1.1.9",
     "description": "Installs a pipx package.",
     "documentationURL": "http://github.com/devcontainers-contrib/features/tree/main/src/pipx-package",
     "installsAfter": [
diff --git a/src/pipx-package/install.sh b/src/pipx-package/install.sh
index f757fd121..0c89c5710 100755
--- a/src/pipx-package/install.sh
+++ b/src/pipx-package/install.sh
@@ -7,7 +7,7 @@ INJECTIONS=${INJECTIONS:-""}
 INCLUDEDEPS=${INCLUDEDEPS:-"false"}
 INTERPRETER=${INTERPRETER:-""}
 
-# PEP 668 compatibility 
+# PEP 668 compatibility
 export PIP_BREAK_SYSTEM_PACKAGES=1
 
 # Clean up
@@ -25,7 +25,7 @@ if [ "$(id -u)" -ne 0 ]; then
 fi
 
 updaterc() {
-    if cat /etc/os-release | grep "ID_LIKE=.*alpine.*\|ID=.*alpine.*" ; then
+    if cat /etc/os-release | grep "ID_LIKE=.*alpine.*\|ID=.*alpine.*"; then
         echo "Updating /etc/profile"
         echo -e "$1" >>/etc/profile
     fi
@@ -55,7 +55,7 @@ install_via_pipx() {
     else
         local _interpreter="python3"
     fi
-    
+
     if [ -z "$INTERPRETER" ]; then
         # if interpreter selected manually - it should exists (validated above)
         if [ "$_interpreter" = "python3" ]; then
@@ -112,12 +112,11 @@ install_via_pipx() {
         updaterc "if [[ \"\${PATH}\" != *\"\${PIPX_BIN_DIR}\"* ]]; then export PATH=\"\${PATH}:\${PIPX_BIN_DIR}\"; fi"
     }
 
-
-    if $_interpreter -m pip list | grep pipx >/dev/null 2>&1; then
+    if $_interpreter -m pip list | grep pipx >/dev/null 2>&1; then
         # if pipx exists in the selected interpreter - use it
         pipx_bin="$_interpreter -m pipx"
     elif [ -n "$INTERPRETER" ]; then
-        # if interpreter was *explicitely* selected, 
+        # if interpreter was *explicitely* selected,
         # and pipx is not installed with it - install it
         _install_pipx
         pipx_bin="$_interpreter -m pipx"
@@ -130,7 +129,6 @@ install_via_pipx() {
         pipx_bin=$PYTHONUSERBASE/bin/pipx
     fi
 
-
     if [ "$(${pipx_bin} list --short | grep "$PACKAGE")" != "" ]; then
         echo "$PACKAGE already exists - skipping installation"
     else
@@ -148,8 +146,13 @@ install_via_pipx() {
         injections_array=($INJECTIONS)
         injections_array_length="${#injections_array[@]}"
 
+        # Save the main package info (with version) to a temporary requirements file
+        # Used in inject command to prevent altering the main package version
+        tmp_requirements_file="/tmp/requirements.txt"
+        echo "$pipx_installation" >$tmp_requirements_file
+
         for ((i = 0; i < ${injections_array_length}; i++)); do
-            ${pipx_bin} inject --pip-args '--no-cache-dir --force-reinstall' -f "$PACKAGE" "${injections_array[$i]}"
+            ${pipx_bin} inject --pip-args "--no-cache-dir --force-reinstall -r ${tmp_requirements_file}" -f "$PACKAGE" "${injections_array[$i]}"
         done
 
     # cleaning pipx to save disk space
diff --git a/test/pipx-package/install_ansible_specific_version.sh b/test/pipx-package/install_ansible_specific_version.sh
new file mode 100755
index 000000000..9c9b888a8
--- /dev/null
+++ b/test/pipx-package/install_ansible_specific_version.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+set -e
+
+source dev-container-features-test-lib
+
+ansible --version
+check "ansible-core version is equal to 2.16.11" bash -c 'ansible --version | grep "core 2.16.11"'
+check "ansible version is equal to 9.10.x" bash -c 'pipx list --include-injected | grep "ansible 9.10."'
+
+reportResults
diff --git a/test/pipx-package/scenarios.json b/test/pipx-package/scenarios.json
index f10766eb9..ae3cf15ae 100644
--- a/test/pipx-package/scenarios.json
+++ b/test/pipx-package/scenarios.json
@@ -31,7 +31,9 @@
     "install_black_custom_interpreter": {
         "image": "debian:bullseye",
         "features": {
-            "ghcr.io/devcontainers/features/python:1": {"version": "3.8.10"},
+            "ghcr.io/devcontainers/features/python:1": {
+                "version": "3.8.10"
+            },
             "pipx-package": {
                 "interpreter": "/usr/local/python/3.8.10/bin/python3",
                 "version": "latest",
@@ -39,5 +41,15 @@
                 "injections": "tqdm pylint"
             }
         }
+    },
+    "install_ansible_specific_version": {
+        "image": "mcr.microsoft.com/devcontainers/base:bookworm",
+        "features": {
+            "pipx-package": {
+                "version": "2.16.11",
+                "package": "ansible-core",
+                "injections": "ansible"
+            }
+        }
     }
 }
\ No newline at end of file

From 227529fe9147df9ba7f2bb82ca541876fa56c179 Mon Sep 17 00:00:00 2001
From: koralowiec <36413794+koralowiec@users.noreply.github.com>
Date: Sat, 28 Sep 2024 14:59:18 +0000
Subject: [PATCH 20/38] fix(ansible): bump pipx-package version to 1.1.9

---
 src/ansible/install.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/ansible/install.sh b/src/ansible/install.sh
index 3076d9b0d..9d023f8e2 100755
--- a/src/ansible/install.sh
+++ b/src/ansible/install.sh
@@ -12,7 +12,7 @@ ensure_nanolayer nanolayer_location "v0.5.0"
 $nanolayer_location \
     install \
     devcontainer-feature \
-    "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \
+    "ghcr.io/devcontainers-extra/features/pipx-package:1.1.9" \
     --option package='ansible-core' --option injections='ansible' --option version="$VERSION"
 
 echo 'Done!'
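Aside (not part of the patch series): the pinning trick from PATCH 19 can be reproduced outside the feature's install.sh. The sketch below is a minimal standalone illustration, assuming pipx is already on PATH; "ansible-core==2.16.11" and "ansible" are example names only, mirroring the new test scenario.

    #!/bin/bash
    # Sketch of the requirements-file pinning approach (not part of the patches).
    # Assumes pipx is installed; package names and versions are examples.
    set -e

    main_spec="ansible-core==2.16.11"   # example pin for the main package
    injection="ansible"                 # package injected into the same venv

    pipx install --pip-args '--no-cache-dir --force-reinstall' "$main_spec"

    # Restating the main package pin via "-r" keeps pip from upgrading or
    # downgrading ansible-core while it resolves the injected "ansible" package.
    echo "$main_spec" > /tmp/requirements.txt
    pipx inject --pip-args "--no-cache-dir --force-reinstall -r /tmp/requirements.txt" \
        ansible-core "$injection"

    # Both the pinned core package and the injected one should now be listed.
    pipx list --include-injected

Without the "-r" pin, injecting "ansible" can drag in whatever ansible-core release its metadata prefers, which is exactly the version drift the feature update guards against.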
From a6870f1539fb3a7e59bb954a9013639629fcb808 Mon Sep 17 00:00:00 2001
From: koralowiec <36413794+koralowiec@users.noreply.github.com>
Date: Sun, 29 Sep 2024 09:13:58 +0000
Subject: [PATCH 21/38] chore: add command to run filtered tests in justfile

---
 .devcontainer/devcontainer.json | 7 +++++--
 justfile                        | 7 ++++++-
 2 files changed, 11 insertions(+), 3 deletions(-)

diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 03055d020..3d12cfe1b 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -6,7 +6,8 @@
         "mads-hartmann.bash-ide-vscode",
         "ms-python.python",
         "ms-python.vscode-pylance",
-        "DavidAnson.vscode-markdownlint"
+        "DavidAnson.vscode-markdownlint",
+        "nefrob.vscode-just-syntax"
       ]
     }
   },
@@ -17,7 +18,9 @@
     "ghcr.io/devcontainers/features/docker-in-docker:2.11.0": {},
     "ghcr.io/devcontainers/features/common-utils:2.5.1": {},
     "ghcr.io/lukewiwa/features/shellcheck:0.2.3": {},
-    "ghcr.io/guiyomh/features/just:0": {}
+    "ghcr.io/guiyomh/features/just:0": {},
+    "ghcr.io/devcontainers-extra/features/pre-commit:2": {},
+    "ghcr.io/devcontainers-extra/features/tmux-homebrew:1": {}
   },
   "postCreateCommand": "/bin/bash -ex ./.devcontainer/setup.sh > postCreateCommand.log"
 }
\ No newline at end of file
diff --git a/justfile b/justfile
index 12efc8465..d195171f5 100644
--- a/justfile
+++ b/justfile
@@ -1,2 +1,7 @@
+set positional-arguments
+
 test feature-name:
-    devcontainer features test -f {{feature-name}} --skip-autogenerated
\ No newline at end of file
+    devcontainer features test -f {{feature-name}} --skip-autogenerated
+
+test-scenario feature-name scenario-filter:
+    devcontainer features test -f {{feature-name}} --filter "{{scenario-filter}}" --skip-autogenerated

From c998649b8305be63286b08f7ac892c5f339bae89 Mon Sep 17 00:00:00 2001
From: koralowiec <36413794+koralowiec@users.noreply.github.com>
Date: Sun, 29 Sep 2024 09:22:09 +0000
Subject: [PATCH 22/38] chore: add pre-commit config

---
 .devcontainer/setup.sh  | 2 ++
 .pre-commit-config.yaml | 7 +++++++
 2 files changed, 9 insertions(+)
 create mode 100644 .pre-commit-config.yaml

diff --git a/.devcontainer/setup.sh b/.devcontainer/setup.sh
index d1c809ff4..5de2908c9 100755
--- a/.devcontainer/setup.sh
+++ b/.devcontainer/setup.sh
@@ -15,3 +15,5 @@ fi
 # Run a new explainshell container
 # this will add hover annotations in shell script files, assuming mads-hartmann.bash-ide-vscod is installed
 docker container run --name explainshell --restart always -p 5000:5000 -d spaceinvaderone/explainshell
+
+pre-commit install --install-hooks
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 000000000..a4a0bf27b
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,7 @@
+---
+repos:
+-   repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v2.3.0
+    hooks:
+    -   id: check-yaml
+    -   id: trailing-whitespace
\ No newline at end of file

From 3602bf28f5974039cfcd9d80a9eb4d21deb0fac1 Mon Sep 17 00:00:00 2001
From: koralowiec <36413794+koralowiec@users.noreply.github.com>
Date: Sun, 29 Sep 2024 09:32:49 +0000
Subject: [PATCH 23/38] chore: fix trailing whitespaces

---
 .../list-changed-features-action/action.yaml | 8 ++-- .github/workflows/release.yaml | 2 +- .github/workflows/test.yaml | 20 +++++----- .vscode/settings.json | 1 + README.md | 2 +- .../src/actions-runner-noexternals/install.sh | 10 ++--- .../library_scripts.sh | 28 +++++++------- .../install.sh | 10 ++--- .../library_scripts.sh | 28 +++++++------- .../src/actions-runner-noruntime/install.sh | 10
++--- .../library_scripts.sh | 28 +++++++------- archive/src/age-keygen/install.sh | 6 +-- archive/src/age-keygen/library_scripts.sh | 28 +++++++------- archive/src/age/install.sh | 6 +-- archive/src/age/library_scripts.sh | 28 +++++++------- archive/src/airplane-cli/install.sh | 6 +-- archive/src/airplane-cli/library_scripts.sh | 28 +++++++------- archive/src/bitwarden-cli/install.sh | 6 +-- archive/src/bitwarden-cli/library_scripts.sh | 28 +++++++------- archive/src/boundary-asdf/install.sh | 6 +-- archive/src/boundary-asdf/library_scripts.sh | 30 +++++++-------- archive/src/btm/install.sh | 6 +-- archive/src/btm/library_scripts.sh | 28 +++++++------- archive/src/cert-manager/install.sh | 6 +-- archive/src/cert-manager/library_scripts.sh | 28 +++++++------- archive/src/cmctl-asdf/install.sh | 6 +-- archive/src/cmctl-asdf/library_scripts.sh | 30 +++++++-------- archive/src/codenotary-cas/install.sh | 6 +-- archive/src/codenotary-cas/library_scripts.sh | 28 +++++++------- archive/src/croc/install.sh | 6 +-- archive/src/croc/library_scripts.sh | 28 +++++++------- archive/src/cue-asdf/install.sh | 6 +-- archive/src/cue-asdf/library_scripts.sh | 30 +++++++-------- archive/src/edge-impulse-cli/install.sh | 6 +-- .../src/edge-impulse-cli/library_scripts.sh | 28 +++++++------- archive/src/elixir-asdf/install.sh | 10 ++--- archive/src/elixir-asdf/library_scripts.sh | 28 +++++++------- archive/src/erlang-asdf/install.sh | 8 ++-- archive/src/erlang-asdf/library_scripts.sh | 28 +++++++------- archive/src/ffmpeg-homebrew/install.sh | 4 +- .../src/ffmpeg-homebrew/library_scripts.sh | 30 +++++++-------- archive/src/gh-release/install.sh | 4 +- archive/src/gh-release/library_scripts.sh | 28 +++++++------- archive/src/homebrew-package/install.sh | 38 +++++++++---------- .../src/homebrew-package/library_scripts.sh | 30 +++++++-------- archive/src/ko/install.sh | 6 +-- archive/src/ko/library_scripts.sh | 28 +++++++------- archive/src/kubescape/install.sh | 6 +-- archive/src/kubescape/library_scripts.sh | 28 +++++++------- archive/src/mage/install.sh | 6 +-- archive/src/mage/library_scripts.sh | 28 +++++++------- archive/src/meson-asdf/install.sh | 6 +-- archive/src/meson-asdf/library_scripts.sh | 30 +++++++-------- archive/src/meteor-cli/install.sh | 6 +-- archive/src/meteor-cli/library_scripts.sh | 28 +++++++------- archive/src/nushell/install.sh | 6 +-- archive/src/nushell/library_scripts.sh | 28 +++++++------- archive/src/ory-keto/install.sh | 6 +-- archive/src/ory-keto/library_scripts.sh | 28 +++++++------- archive/src/pip-audit/install.sh | 6 +-- archive/src/pip-audit/library_scripts.sh | 28 +++++++------- archive/src/porter/install.sh | 24 ++++++------ archive/src/porter/library_scripts.sh | 30 +++++++-------- archive/src/postgres-asdf/install.sh | 8 ++-- archive/src/postgres-asdf/library_scripts.sh | 30 +++++++-------- archive/src/rabbitmq-asdf/install.sh | 12 +++--- archive/src/rabbitmq-asdf/library_scripts.sh | 30 +++++++-------- archive/src/scala-asdf/install.sh | 8 ++-- archive/src/scala-asdf/library_scripts.sh | 30 +++++++-------- archive/src/serverless/install.sh | 6 +-- archive/src/serverless/library_scripts.sh | 28 +++++++------- archive/src/sqlfmt/install.sh | 6 +-- archive/src/sqlfmt/library_scripts.sh | 28 +++++++------- archive/src/syncthing/install.sh | 6 +-- archive/src/syncthing/library_scripts.sh | 28 +++++++------- archive/src/trivy/install.sh | 6 +-- archive/src/trivy/library_scripts.sh | 28 +++++++------- archive/src/vertx-sdkman/install.sh | 4 +- 
archive/src/vertx-sdkman/library_scripts.sh | 30 +++++++-------- archive/src/visualvm-sdkman/install.sh | 4 +- .../src/visualvm-sdkman/library_scripts.sh | 30 +++++++-------- archive/src/volta/install.sh | 6 +-- archive/src/volta/library_scripts.sh | 28 +++++++------- archive/src/xplr/install.sh | 6 +-- archive/src/xplr/library_scripts.sh | 28 +++++++------- src/act-asdf/install.sh | 6 +-- src/act-asdf/library_scripts.sh | 30 +++++++-------- src/act/install.sh | 6 +-- src/act/library_scripts.sh | 28 +++++++------- src/actionlint/install.sh | 6 +-- src/actionlint/library_scripts.sh | 28 +++++++------- src/actions-runner/install.sh | 10 ++--- src/actions-runner/library_scripts.sh | 28 +++++++------- src/activemq-sdkman/install.sh | 4 +- src/activemq-sdkman/library_scripts.sh | 30 +++++++-------- src/akamai-cli/install.sh | 6 +-- src/akamai-cli/library_scripts.sh | 28 +++++++------- src/alertmanager/install.sh | 6 +-- src/alertmanager/library_scripts.sh | 28 +++++++------- src/alp-asdf/install.sh | 6 +-- src/alp-asdf/library_scripts.sh | 30 +++++++-------- src/amplify-cli/install.sh | 6 +-- src/amplify-cli/library_scripts.sh | 28 +++++++------- src/angular-cli/install.sh | 6 +-- src/angular-cli/library_scripts.sh | 28 +++++++------- src/ansible/library_scripts.sh | 28 +++++++------- src/ant-sdkman/install.sh | 4 +- src/ant-sdkman/library_scripts.sh | 30 +++++++-------- src/apko/install.sh | 6 +-- src/apko/library_scripts.sh | 28 +++++++------- src/apt-get-packages/install.sh | 6 +-- src/apt-get-packages/library_scripts.sh | 30 +++++++-------- src/apt-packages/install.sh | 6 +-- src/apt-packages/library_scripts.sh | 30 +++++++-------- src/argo-cd/install.sh | 2 +- src/asciidoctorj-sdkman/install.sh | 4 +- src/asciidoctorj-sdkman/library_scripts.sh | 30 +++++++-------- src/asdf-package/install.sh | 18 ++++----- src/assemblyscript/install.sh | 6 +-- src/assemblyscript/library_scripts.sh | 28 +++++++------- src/atlantis/install.sh | 6 +-- src/atlantis/library_scripts.sh | 28 +++++++------- src/atmos/install.sh | 6 +-- src/atmos/library_scripts.sh | 28 +++++++------- src/auditjs/install.sh | 6 +-- src/auditjs/library_scripts.sh | 28 +++++++------- src/autoenv/install.sh | 6 +-- src/autoenv/library_scripts.sh | 28 +++++++------- src/aws-cdk/install.sh | 6 +-- src/aws-cdk/library_scripts.sh | 28 +++++++------- src/aws-eb-cli/install.sh | 6 +-- src/aws-eb-cli/library_scripts.sh | 28 +++++++------- src/aztfexport/install.sh | 6 +-- src/aztfexport/library_scripts.sh | 28 +++++++------- src/azure-apiops/library_scripts.sh | 28 +++++++------- src/ballerina-sdkman/install.sh | 4 +- src/ballerina-sdkman/library_scripts.sh | 30 +++++++-------- src/bandit/install.sh | 6 +-- src/bandit/library_scripts.sh | 28 +++++++------- src/bartib/install.sh | 6 +-- src/bartib/library_scripts.sh | 28 +++++++------- src/beehive/install.sh | 6 +-- src/beehive/library_scripts.sh | 28 +++++++------- src/bigcommerce-stencil-cli/install.sh | 6 +-- .../library_scripts.sh | 28 +++++++------- src/bikeshed/install.sh | 6 +-- src/bikeshed/library_scripts.sh | 28 +++++++------- src/bin/install.sh | 6 +-- src/bin/library_scripts.sh | 28 +++++++------- src/black/install.sh | 6 +-- src/black/library_scripts.sh | 28 +++++++------- src/blackbox-exporter/install.sh | 6 +-- src/blackbox-exporter/library_scripts.sh | 28 +++++++------- src/bomber/install.sh | 6 +-- src/bomber/library_scripts.sh | 28 +++++++------- src/bower/install.sh | 6 +-- src/bower/library_scripts.sh | 28 +++++++------- src/bpipe-sdkman/install.sh | 4 +- 
src/bpipe-sdkman/library_scripts.sh | 30 +++++++-------- src/brownie/library_scripts.sh | 28 +++++++------- src/browserify/install.sh | 6 +-- src/browserify/library_scripts.sh | 28 +++++++------- src/btop-homebrew/install.sh | 4 +- src/btop-homebrew/library_scripts.sh | 30 +++++++-------- src/btrace-sdkman/install.sh | 4 +- src/btrace-sdkman/library_scripts.sh | 30 +++++++-------- src/budibase-cli/install.sh | 6 +-- src/budibase-cli/library_scripts.sh | 28 +++++++------- src/buku/install.sh | 6 +-- src/buku/library_scripts.sh | 28 +++++++------- src/caddy/install.sh | 8 ++-- src/caddy/library_scripts.sh | 28 +++++++------- src/ccache-asdf/install.sh | 8 ++-- src/ccache-asdf/library_scripts.sh | 30 +++++++-------- src/checkov/install.sh | 6 +-- src/checkov/library_scripts.sh | 28 +++++++------- src/chezscheme-asdf/install.sh | 8 ++-- src/chezscheme-asdf/library_scripts.sh | 30 +++++++-------- src/chisel/install.sh | 6 +-- src/chisel/library_scripts.sh | 28 +++++++------- src/circleci-cli/install.sh | 6 +-- src/circleci-cli/library_scripts.sh | 28 +++++++------- src/clojure-asdf/install.sh | 6 +-- src/clojure-asdf/library_scripts.sh | 30 +++++++-------- src/cloud-nuke/install.sh | 6 +-- src/cloud-nuke/library_scripts.sh | 28 +++++++------- src/cloudflare-wrangler/install.sh | 6 +-- src/cloudflare-wrangler/library_scripts.sh | 28 +++++++------- src/cloudflared-fips/install.sh | 6 +-- src/cloudflared-fips/library_scripts.sh | 28 +++++++------- src/cloudflared/install.sh | 6 +-- src/cloudflared/library_scripts.sh | 28 +++++++------- src/cloudinary-cli/install.sh | 6 +-- src/cloudinary-cli/library_scripts.sh | 28 +++++++------- src/codefresh-cli/install.sh | 6 +-- src/codefresh-cli/library_scripts.sh | 28 +++++++------- src/composer/install.sh | 6 +-- src/composer/library_scripts.sh | 28 +++++++------- src/concurnas-sdkman/install.sh | 4 +- src/concurnas-sdkman/library_scripts.sh | 30 +++++++-------- src/connor-sdkman/install.sh | 4 +- src/connor-sdkman/library_scripts.sh | 30 +++++++-------- src/consul-asdf/install.sh | 6 +-- src/consul-asdf/library_scripts.sh | 30 +++++++-------- src/consul-exporter/install.sh | 6 +-- src/consul-exporter/library_scripts.sh | 28 +++++++------- src/cookiecutter/install.sh | 6 +-- src/cookiecutter/library_scripts.sh | 28 +++++++------- src/copier/install.sh | 6 +-- src/copier/library_scripts.sh | 28 +++++++------- src/corepack/install.sh | 6 +-- src/corepack/library_scripts.sh | 28 +++++++------- src/cosign/install.sh | 6 +-- src/cosign/library_scripts.sh | 28 +++++++------- src/coverage-py/install.sh | 6 +-- src/coverage-py/library_scripts.sh | 28 +++++++------- src/crystal-asdf/install.sh | 6 +-- src/crystal-asdf/library_scripts.sh | 30 +++++++-------- src/cuba-sdkman/install.sh | 4 +- src/cuba-sdkman/library_scripts.sh | 30 +++++++-------- src/curl-apt-get/install.sh | 4 +- src/curl-apt-get/library_scripts.sh | 30 +++++++-------- src/curl-homebrew/install.sh | 4 +- src/curl-homebrew/library_scripts.sh | 30 +++++++-------- src/cve-bin-tool/install.sh | 6 +-- src/cve-bin-tool/library_scripts.sh | 28 +++++++------- src/cxf-sdkman/install.sh | 4 +- src/cxf-sdkman/library_scripts.sh | 30 +++++++-------- src/cyclonedx-cli/install.sh | 6 +-- src/cyclonedx-cli/library_scripts.sh | 28 +++++++------- src/cyclonedx-python/install.sh | 6 +-- src/cyclonedx-python/library_scripts.sh | 28 +++++++------- src/cz-cli/install.sh | 6 +-- src/cz-cli/library_scripts.sh | 28 +++++++------- src/dasel-asdf/install.sh | 6 +-- src/dasel-asdf/library_scripts.sh | 30 +++++++-------- 
src/dashlane-cli/install.sh | 6 +-- src/dashlane-cli/library_scripts.sh | 28 +++++++------- src/datadog-ci-cli/install.sh | 6 +-- src/datadog-ci-cli/library_scripts.sh | 28 +++++++------- src/datasette/install.sh | 6 +-- src/datasette/library_scripts.sh | 28 +++++++------- src/dbt-coverage/install.sh | 6 +-- src/dbt-coverage/library_scripts.sh | 28 +++++++------- src/ddgr-apt-get/install.sh | 4 +- src/ddgr-apt-get/library_scripts.sh | 30 +++++++-------- src/ddgr-homebrew/install.sh | 4 +- src/ddgr-homebrew/library_scripts.sh | 30 +++++++-------- src/deno-asdf/install.sh | 6 +-- src/deno-asdf/library_scripts.sh | 30 +++++++-------- src/devcontainers-cli/install.sh | 6 +-- src/devcontainers-cli/library_scripts.sh | 28 +++++++------- src/direnv-asdf/install.sh | 6 +-- src/direnv-asdf/library_scripts.sh | 30 +++++++-------- src/direnv/install.sh | 6 +-- src/direnv/library_scripts.sh | 28 +++++++------- src/dive/install.sh | 6 +-- src/dive/library_scripts.sh | 28 +++++++------- src/dnote/install.sh | 6 +-- src/dnote/library_scripts.sh | 28 +++++++------- src/doctoolchain-sdkman/install.sh | 4 +- src/doctoolchain-sdkman/library_scripts.sh | 30 +++++++-------- src/dprint-asdf/install.sh | 6 +-- src/dprint-asdf/library_scripts.sh | 30 +++++++-------- src/driftctl/install.sh | 6 +-- src/driftctl/library_scripts.sh | 28 +++++++------- src/drone-cli/install.sh | 6 +-- src/drone-cli/library_scripts.sh | 28 +++++++------- src/dua/install.sh | 6 +-- src/dua/library_scripts.sh | 28 +++++++------- src/duf/install.sh | 6 +-- src/duf/library_scripts.sh | 28 +++++++------- src/dufs/install.sh | 6 +-- src/dufs/library_scripts.sh | 28 +++++++------- src/eas-cli/install.sh | 6 +-- src/eas-cli/library_scripts.sh | 28 +++++++------- src/eget/install.sh | 6 +-- src/eget/library_scripts.sh | 28 +++++++------- src/elasticsearch-asdf/install.sh | 6 +-- src/elasticsearch-asdf/library_scripts.sh | 30 +++++++-------- src/elm-asdf/install.sh | 6 +-- src/elm-asdf/library_scripts.sh | 30 +++++++-------- src/ember-cli/install.sh | 6 +-- src/ember-cli/library_scripts.sh | 28 +++++++------- src/envoy/install.sh | 6 +-- src/envoy/library_scripts.sh | 28 +++++++------- src/epinio/install.sh | 6 +-- src/epinio/library_scripts.sh | 28 +++++++------- src/etcd/install.sh | 6 +-- src/etcd/library_scripts.sh | 28 +++++++------- src/exa/install.sh | 6 +-- src/exa/library_scripts.sh | 28 +++++++------- src/exercism-cli/install.sh | 6 +-- src/exercism-cli/library_scripts.sh | 28 +++++++------- src/expo-cli/install.sh | 6 +-- src/expo-cli/library_scripts.sh | 28 +++++++------- src/express-generator/install.sh | 6 +-- src/express-generator/library_scripts.sh | 28 +++++++------- src/fd/install.sh | 6 +-- src/fd/library_scripts.sh | 28 +++++++------- src/ffmpeg-apt-get/install.sh | 4 +- src/ffmpeg-apt-get/library_scripts.sh | 30 +++++++-------- src/firebase-cli/install.sh | 6 +-- src/firebase-cli/library_scripts.sh | 28 +++++++------- src/fish-apt-get/install.sh | 4 +- src/fish-apt-get/library_scripts.sh | 30 +++++++-------- src/fkill/install.sh | 6 +-- src/fkill/library_scripts.sh | 28 +++++++------- src/flake8/install.sh | 6 +-- src/flake8/library_scripts.sh | 28 +++++++------- src/flink-sdkman/install.sh | 4 +- src/flink-sdkman/library_scripts.sh | 30 +++++++-------- src/flit/install.sh | 6 +-- src/flit/library_scripts.sh | 28 +++++++------- src/former2-cli/install.sh | 8 ++-- src/former2-cli/library_scripts.sh | 28 +++++++------- src/fossil-apt-get/install.sh | 4 +- src/fossil-apt-get/library_scripts.sh | 30 +++++++-------- 
src/fossil-homebrew/install.sh | 4 +- src/fossil-homebrew/library_scripts.sh | 30 +++++++-------- src/fulcio/install.sh | 6 +-- src/fulcio/library_scripts.sh | 28 +++++++------- src/fzf/install.sh | 6 +-- src/fzf/library_scripts.sh | 28 +++++++------- src/gaiden-sdkman/install.sh | 4 +- src/gaiden-sdkman/library_scripts.sh | 30 +++++++-------- src/ganache/install.sh | 6 +-- src/ganache/library_scripts.sh | 28 +++++++------- src/gdbgui/install.sh | 6 +-- src/gdbgui/library_scripts.sh | 28 +++++++------- src/gh-cli/install.sh | 6 +-- src/gh-cli/library_scripts.sh | 28 +++++++------- src/git-lfs/install.sh | 6 +-- src/git-lfs/library_scripts.sh | 28 +++++++------- src/gitmux/install.sh | 6 +-- src/gitmux/library_scripts.sh | 28 +++++++------- src/gitomatic/install.sh | 6 +-- src/gitomatic/library_scripts.sh | 28 +++++++------- src/gitsign-credential-cache/install.sh | 6 +-- .../library_scripts.sh | 28 +++++++------- src/gitsign/install.sh | 6 +-- src/gitsign/library_scripts.sh | 28 +++++++------- src/gitty/install.sh | 6 +-- src/gitty/library_scripts.sh | 28 +++++++------- src/glances/install.sh | 6 +-- src/glances/library_scripts.sh | 28 +++++++------- src/gleam/install.sh | 6 +-- src/gleam/library_scripts.sh | 28 +++++++------- src/go-task/install.sh | 6 +-- src/go-task/library_scripts.sh | 28 +++++++------- src/graalvm-asdf/install.sh | 6 +-- src/graalvm-asdf/library_scripts.sh | 30 +++++++-------- src/gradle-sdkman/install.sh | 4 +- src/gradle-sdkman/library_scripts.sh | 30 +++++++-------- src/gradleprofiler-sdkman/install.sh | 4 +- src/gradleprofiler-sdkman/library_scripts.sh | 30 +++++++-------- src/grails-sdkman/install.sh | 4 +- src/grails-sdkman/library_scripts.sh | 30 +++++++-------- src/graphite-exporter/install.sh | 6 +-- src/graphite-exporter/library_scripts.sh | 28 +++++++------- src/groovy-sdkman/install.sh | 4 +- src/groovy-sdkman/library_scripts.sh | 30 +++++++-------- src/groovyserv-sdkman/install.sh | 4 +- src/groovyserv-sdkman/library_scripts.sh | 30 +++++++-------- src/grpcurl-asdf/install.sh | 6 +-- src/grpcurl-asdf/library_scripts.sh | 30 +++++++-------- src/grype/install.sh | 6 +-- src/grype/library_scripts.sh | 28 +++++++------- src/gulp-cli/install.sh | 6 +-- src/gulp-cli/library_scripts.sh | 28 +++++++------- src/hadoop-sdkman/install.sh | 4 +- src/hadoop-sdkman/library_scripts.sh | 30 +++++++-------- src/hatch/install.sh | 6 +-- src/hatch/library_scripts.sh | 28 +++++++------- src/haxe-asdf/install.sh | 6 +-- src/haxe-asdf/library_scripts.sh | 30 +++++++-------- src/hotel/install.sh | 6 +-- src/hotel/library_scripts.sh | 28 +++++++------- src/how2/install.sh | 6 +-- src/how2/library_scripts.sh | 28 +++++++------- src/http-server/install.sh | 6 +-- src/http-server/library_scripts.sh | 28 +++++++------- src/http4k-sdkman/install.sh | 4 +- src/http4k-sdkman/library_scripts.sh | 30 +++++++-------- src/hyperfine/install.sh | 6 +-- src/hyperfine/library_scripts.sh | 28 +++++++------- src/immuadmin-fips/install.sh | 6 +-- src/immuadmin-fips/library_scripts.sh | 28 +++++++------- src/immuadmin/install.sh | 6 +-- src/immuadmin/library_scripts.sh | 28 +++++++------- src/immuclient-fips/install.sh | 6 +-- src/immuclient-fips/library_scripts.sh | 28 +++++++------- src/immuclient/install.sh | 6 +-- src/immuclient/library_scripts.sh | 28 +++++++------- src/immudb-fips/install.sh | 6 +-- src/immudb-fips/library_scripts.sh | 28 +++++++------- src/immudb/install.sh | 6 +-- src/immudb/library_scripts.sh | 28 +++++++------- src/infracost/install.sh | 6 +-- 
src/infracost/library_scripts.sh | 28 +++++++------- src/infrastructor-sdkman/install.sh | 4 +- src/infrastructor-sdkman/library_scripts.sh | 30 +++++++-------- src/invoke/install.sh | 6 +-- src/invoke/library_scripts.sh | 28 +++++++------- src/ionic-cli/install.sh | 6 +-- src/ionic-cli/library_scripts.sh | 28 +++++++------- src/isort/install.sh | 6 +-- src/isort/library_scripts.sh | 28 +++++++------- src/istioctl/install.sh | 6 +-- src/istioctl/library_scripts.sh | 28 +++++++------- src/jake/install.sh | 6 +-- src/jake/library_scripts.sh | 28 +++++++------- src/jbake-sdkman/install.sh | 4 +- src/jbake-sdkman/library_scripts.sh | 30 +++++++-------- src/jbang-sdkman/install.sh | 4 +- src/jbang-sdkman/library_scripts.sh | 30 +++++++-------- src/jenkinsx-cli/install.sh | 6 +-- src/jenkinsx-cli/library_scripts.sh | 28 +++++++------- src/jest/install.sh | 6 +-- src/jest/library_scripts.sh | 28 +++++++------- src/jfrog-cli-homebrew/install.sh | 4 +- src/jfrog-cli-homebrew/library_scripts.sh | 30 +++++++-------- src/jfrog-cli-npm/install.sh | 6 +-- src/jfrog-cli-npm/library_scripts.sh | 28 +++++++------- src/jfrog-cli/install.sh | 6 +-- src/jfrog-cli/library_scripts.sh | 30 +++++++-------- src/jira-cli/install.sh | 6 +-- src/jira-cli/library_scripts.sh | 28 +++++++------- src/jmc-sdkman/install.sh | 4 +- src/jmc-sdkman/library_scripts.sh | 30 +++++++-------- src/jmeter-sdkman/install.sh | 4 +- src/jmeter-sdkman/library_scripts.sh | 30 +++++++-------- src/joern-sdkman/install.sh | 4 +- src/joern-sdkman/library_scripts.sh | 30 +++++++-------- src/jreleaser-sdkman/install.sh | 4 +- src/jreleaser-sdkman/library_scripts.sh | 30 +++++++-------- src/jrnl/install.sh | 6 +-- src/jrnl/library_scripts.sh | 28 +++++++------- src/jshint/install.sh | 6 +-- src/jshint/library_scripts.sh | 28 +++++++------- src/jsii-diff/install.sh | 6 +-- src/jsii-diff/library_scripts.sh | 28 +++++++------- src/jsii-pacmak/install.sh | 6 +-- src/jsii-pacmak/library_scripts.sh | 28 +++++++------- src/jsii-rosetta/install.sh | 6 +-- src/jsii-rosetta/library_scripts.sh | 28 +++++++------- src/jsii/install.sh | 6 +-- src/jsii/library_scripts.sh | 28 +++++++------- src/json-server/install.sh | 6 +-- src/json-server/library_scripts.sh | 28 +++++++------- src/k2tf/install.sh | 6 +-- src/k2tf/library_scripts.sh | 28 +++++++------- src/k6/install.sh | 6 +-- src/k6/library_scripts.sh | 28 +++++++------- src/karaf-sdkman/install.sh | 4 +- src/karaf-sdkman/library_scripts.sh | 30 +++++++-------- src/keepercommander/install.sh | 6 +-- src/keepercommander/library_scripts.sh | 28 +++++++------- src/ki-sdkman/install.sh | 4 +- src/ki-sdkman/library_scripts.sh | 30 +++++++-------- src/kind/install.sh | 6 +-- src/kind/library_scripts.sh | 28 +++++++------- src/kobweb-sdkman/install.sh | 4 +- src/kobweb-sdkman/library_scripts.sh | 30 +++++++-------- src/kops/install.sh | 6 +-- src/kops/library_scripts.sh | 28 +++++++------- src/kotlin-sdkman/install.sh | 4 +- src/kotlin-sdkman/library_scripts.sh | 30 +++++++-------- src/kscript-sdkman/install.sh | 4 +- src/kscript-sdkman/library_scripts.sh | 30 +++++++-------- src/kubeclarity-cli/install.sh | 6 +-- src/kubeclarity-cli/library_scripts.sh | 28 +++++++------- src/kubectl-asdf/install.sh | 6 +-- src/kubectl-asdf/library_scripts.sh | 30 +++++++-------- src/kubectx-kubens/install.sh | 8 ++-- src/kubectx-kubens/library_scripts.sh | 28 +++++++------- src/kyverno-cli/install.sh | 6 +-- src/kyverno-cli/library_scripts.sh | 28 +++++++------- src/lastpass-cli-homebrew/install.sh | 4 +- 
src/lastpass-cli-homebrew/library_scripts.sh | 30 +++++++-------- src/layrry-sdkman/install.sh | 4 +- src/layrry-sdkman/library_scripts.sh | 30 +++++++-------- src/lean-asdf/install.sh | 6 +-- src/lean-asdf/library_scripts.sh | 30 +++++++-------- src/lefthook-asdf/install.sh | 6 +-- src/lefthook-asdf/library_scripts.sh | 30 +++++++-------- src/leiningen-sdkman/install.sh | 4 +- src/leiningen-sdkman/library_scripts.sh | 30 +++++++-------- src/lektor/install.sh | 6 +-- src/lektor/library_scripts.sh | 28 +++++++------- src/lerna-npm/install.sh | 6 +-- src/lerna-npm/library_scripts.sh | 28 +++++++------- src/less/install.sh | 6 +-- src/less/library_scripts.sh | 28 +++++++------- src/levant-asdf/install.sh | 6 +-- src/levant-asdf/library_scripts.sh | 30 +++++++-------- src/lighthouse-cli/install.sh | 6 +-- src/lighthouse-cli/library_scripts.sh | 28 +++++++------- src/linkerd2-cli-edge/install.sh | 6 +-- src/linkerd2-cli-edge/library_scripts.sh | 28 +++++++------- src/linkerd2-cli-stable/install.sh | 6 +-- src/linkerd2-cli-stable/library_scripts.sh | 28 +++++++------- src/linode-cli/install.sh | 6 +-- src/linode-cli/library_scripts.sh | 28 +++++++------- src/lite-server/install.sh | 6 +-- src/lite-server/library_scripts.sh | 28 +++++++------- src/live-server/install.sh | 6 +-- src/live-server/library_scripts.sh | 28 +++++++------- src/localstack/library_scripts.sh | 28 +++++++------- src/localtunnel-npm/install.sh | 6 +-- src/localtunnel-npm/library_scripts.sh | 28 +++++++------- src/mackup/install.sh | 6 +-- src/mackup/library_scripts.sh | 28 +++++++------- src/markdownlint-cli/install.sh | 6 +-- src/markdownlint-cli/library_scripts.sh | 28 +++++++------- src/markdownlint-cli2/install.sh | 6 +-- src/markdownlint-cli2/library_scripts.sh | 28 +++++++------- src/maven-sdkman/install.sh | 4 +- src/maven-sdkman/library_scripts.sh | 30 +++++++-------- src/meltano/install.sh | 6 +-- src/meltano/library_scripts.sh | 28 +++++++------- src/memcached-exporter/install.sh | 6 +-- src/memcached-exporter/library_scripts.sh | 28 +++++++------- src/micro/install.sh | 4 +- src/micro/library_scripts.sh | 30 +++++++-------- src/micronaut-sdkman/install.sh | 4 +- src/micronaut-sdkman/library_scripts.sh | 30 +++++++-------- src/mitmproxy/install.sh | 6 +-- src/mitmproxy/library_scripts.sh | 28 +++++++------- src/mkcert/install.sh | 6 +-- src/mkcert/library_scripts.sh | 28 +++++++------- src/mkdocs/install.sh | 6 +-- src/mkdocs/library_scripts.sh | 28 +++++++------- src/mlocate-apt-get/install.sh | 4 +- src/mlocate-apt-get/library_scripts.sh | 30 +++++++-------- src/mlton-asdf/install.sh | 6 +-- src/mlton-asdf/library_scripts.sh | 30 +++++++-------- src/mocha/install.sh | 6 +-- src/mocha/library_scripts.sh | 28 +++++++------- src/mongodb-atlas-cli-homebrew/install.sh | 4 +- .../library_scripts.sh | 30 +++++++-------- src/mongosh-homebrew/install.sh | 4 +- src/mongosh-homebrew/library_scripts.sh | 30 +++++++-------- src/mosh-apt-get/install.sh | 4 +- src/mosh-apt-get/library_scripts.sh | 30 +++++++-------- src/mosh-homebrew/install.sh | 4 +- src/mosh-homebrew/library_scripts.sh | 30 +++++++-------- src/mulefd-sdkman/install.sh | 4 +- src/mulefd-sdkman/library_scripts.sh | 30 +++++++-------- src/mvnd-sdkman/install.sh | 4 +- src/mvnd-sdkman/library_scripts.sh | 30 +++++++-------- src/mybatis-sdkman/install.sh | 4 +- src/mybatis-sdkman/library_scripts.sh | 30 +++++++-------- src/mypy/install.sh | 6 +-- src/mypy/library_scripts.sh | 28 +++++++------- src/mysql-homebrew/install.sh | 4 +- 
src/mysql-homebrew/library_scripts.sh | 30 +++++++-------- src/mysqld-exporter/install.sh | 6 +-- src/mysqld-exporter/library_scripts.sh | 28 +++++++------- src/n8n/install.sh | 6 +-- src/n8n/library_scripts.sh | 28 +++++++------- src/nancy/install.sh | 6 +-- src/nancy/library_scripts.sh | 28 +++++++------- src/navi/install.sh | 6 +-- src/navi/library_scripts.sh | 28 +++++++------- src/ncdu/install.sh | 4 +- src/ncdu/library_scripts.sh | 30 +++++++-------- src/neko-asdf/install.sh | 6 +-- src/neko-asdf/library_scripts.sh | 30 +++++++-------- src/neo4jmigrations-sdkman/install.sh | 4 +- src/neo4jmigrations-sdkman/library_scripts.sh | 30 +++++++-------- src/neofetch/install.sh | 4 +- src/neofetch/library_scripts.sh | 30 +++++++-------- src/neovim-apt-get/install.sh | 6 +-- src/neovim-apt-get/library_scripts.sh | 30 +++++++-------- src/neovim-homebrew/install.sh | 4 +- src/neovim-homebrew/library_scripts.sh | 30 +++++++-------- src/nestjs-cli/install.sh | 6 +-- src/nestjs-cli/library_scripts.sh | 28 +++++++------- src/netdata/library_scripts.sh | 30 +++++++-------- src/netlify-cli/install.sh | 6 +-- src/netlify-cli/library_scripts.sh | 28 +++++++------- src/nim-asdf/install.sh | 6 +-- src/nim-asdf/library_scripts.sh | 30 +++++++-------- src/ninja-asdf/install.sh | 6 +-- src/ninja-asdf/library_scripts.sh | 30 +++++++-------- src/nmap-apt-get/install.sh | 4 +- src/nmap-apt-get/library_scripts.sh | 30 +++++++-------- src/nmap-homebrew/install.sh | 4 +- src/nmap-homebrew/library_scripts.sh | 30 +++++++-------- src/nnn-apt-get/install.sh | 4 +- src/nnn-apt-get/library_scripts.sh | 30 +++++++-------- src/nnn-homebrew/install.sh | 4 +- src/nnn-homebrew/library_scripts.sh | 30 +++++++-------- src/node-asdf/install.sh | 6 +-- src/node-asdf/library_scripts.sh | 28 +++++++------- src/node-exporter/install.sh | 6 +-- src/node-exporter/library_scripts.sh | 28 +++++++------- src/nomad-asdf/install.sh | 6 +-- src/nomad-asdf/library_scripts.sh | 30 +++++++-------- src/nox/install.sh | 6 +-- src/nox/library_scripts.sh | 28 +++++++------- src/npm-package/install.sh | 2 +- src/nx-npm/install.sh | 6 +-- src/nx-npm/library_scripts.sh | 28 +++++++------- src/ocaml-asdf/install.sh | 6 +-- src/ocaml-asdf/library_scripts.sh | 30 +++++++-------- src/oclif/install.sh | 6 +-- src/oclif/library_scripts.sh | 28 +++++++------- src/opa/install.sh | 6 +-- src/opa/library_scripts.sh | 28 +++++++------- src/opam-asdf/install.sh | 6 +-- src/opam-asdf/library_scripts.sh | 30 +++++++-------- src/ory-cli/install.sh | 6 +-- src/ory-cli/library_scripts.sh | 28 +++++++------- src/ory-hydra/install.sh | 6 +-- src/ory-hydra/library_scripts.sh | 28 +++++++------- src/ory-kratos/install.sh | 6 +-- src/ory-kratos/library_scripts.sh | 28 +++++++------- src/ory-oathkeeper/install.sh | 6 +-- src/ory-oathkeeper/library_scripts.sh | 28 +++++++------- src/packer-asdf/install.sh | 6 +-- src/packer-asdf/library_scripts.sh | 30 +++++++-------- src/pandoc/install.sh | 6 +-- src/pandoc/library_scripts.sh | 28 +++++++------- src/pass-apt-get/install.sh | 4 +- src/pass-apt-get/library_scripts.sh | 30 +++++++-------- src/pdm/install.sh | 6 +-- src/pdm/library_scripts.sh | 28 +++++++------- src/peco-asdf/install.sh | 6 +-- src/peco-asdf/library_scripts.sh | 30 +++++++-------- src/perl-asdf/install.sh | 6 +-- src/perl-asdf/library_scripts.sh | 30 +++++++-------- src/pierrot-sdkman/install.sh | 4 +- src/pierrot-sdkman/library_scripts.sh | 30 +++++++-------- src/pipenv/install.sh | 6 +-- src/pipenv/library_scripts.sh | 28 +++++++------- 
src/pipx-package/install.sh | 1 + src/pnpm/install.sh | 6 +-- src/pnpm/library_scripts.sh | 28 +++++++------- src/podman-homebrew/install.sh | 4 +- src/podman-homebrew/library_scripts.sh | 30 +++++++-------- src/poetry/install.sh | 6 +-- src/poetry/library_scripts.sh | 28 +++++++------- src/pomchecker-sdkman/install.sh | 4 +- src/pomchecker-sdkman/library_scripts.sh | 30 +++++++-------- src/poppler-utils-apt-get/README.md | 24 ++++++------ src/poppler-utils-apt-get/install.sh | 6 +-- src/poppler-utils-apt-get/library_scripts.sh | 28 +++++++------- src/powerbi-visuals-tools/install.sh | 6 +-- src/powerbi-visuals-tools/library_scripts.sh | 28 +++++++------- src/powershell/install.sh | 6 +-- src/powershell/library_scripts.sh | 28 +++++++------- src/pre-commit/install.sh | 6 +-- src/pre-commit/library_scripts.sh | 28 +++++++------- src/prettier/install.sh | 6 +-- src/prettier/library_scripts.sh | 28 +++++++------- src/prisma/install.sh | 6 +-- src/prisma/library_scripts.sh | 28 +++++++------- src/projen/install.sh | 6 +-- src/projen/library_scripts.sh | 28 +++++++------- src/prometheus/install.sh | 6 +-- src/prometheus/library_scripts.sh | 28 +++++++------- src/promlens/install.sh | 6 +-- src/promlens/library_scripts.sh | 28 +++++++------- src/protoc-asdf/install.sh | 6 +-- src/protoc-asdf/library_scripts.sh | 30 +++++++-------- src/protoc/install.sh | 6 +-- src/protoc/library_scripts.sh | 28 +++++++------- src/pushgateway/install.sh | 6 +-- src/pushgateway/library_scripts.sh | 28 +++++++------- src/pyinfra/install.sh | 6 +-- src/pyinfra/library_scripts.sh | 28 +++++++------- src/pylint/install.sh | 6 +-- src/pylint/library_scripts.sh | 28 +++++++------- src/pyoxidizer/install.sh | 6 +-- src/pyoxidizer/library_scripts.sh | 28 +++++++------- src/pyscaffold/install.sh | 6 +-- src/pyscaffold/library_scripts.sh | 28 +++++++------- src/qrcode/install.sh | 6 +-- src/qrcode/library_scripts.sh | 28 +++++++------- src/quarkus-sdkman/install.sh | 4 +- src/quarkus-sdkman/library_scripts.sh | 30 +++++++-------- src/quasar-cli/install.sh | 6 +-- src/quasar-cli/library_scripts.sh | 28 +++++++------- src/raku-asdf/install.sh | 8 ++-- src/raku-asdf/library_scripts.sh | 30 +++++++-------- src/rclone/install.sh | 6 +-- src/rclone/library_scripts.sh | 28 +++++++------- src/redis-homebrew/install.sh | 4 +- src/redis-homebrew/library_scripts.sh | 30 +++++++-------- src/rekor-cli/install.sh | 6 +-- src/rekor-cli/library_scripts.sh | 28 +++++++------- src/renovate-cli/install.sh | 6 +-- src/renovate-cli/library_scripts.sh | 28 +++++++------- src/ripgrep/install.sh | 6 +-- src/ripgrep/library_scripts.sh | 28 +++++++------- src/rollup/install.sh | 6 +-- src/rollup/library_scripts.sh | 28 +++++++------- src/ruby-asdf/install.sh | 8 ++-- src/ruby-asdf/library_scripts.sh | 28 +++++++------- src/ruff/install.sh | 6 +-- src/ruff/library_scripts.sh | 28 +++++++------- src/salesforce-cli/install.sh | 6 +-- src/salesforce-cli/library_scripts.sh | 28 +++++++------- src/salesforce-sfdx/install.sh | 6 +-- src/salesforce-sfdx/library_scripts.sh | 28 +++++++------- src/sanity-cli/install.sh | 6 +-- src/sanity-cli/library_scripts.sh | 28 +++++++------- src/sap-piper/install.sh | 6 +-- src/sap-piper/library_scripts.sh | 28 +++++++------- src/sbt-sdkman/install.sh | 4 +- src/sbt-sdkman/library_scripts.sh | 30 +++++++-------- src/scala-sdkman/install.sh | 4 +- src/scala-sdkman/library_scripts.sh | 30 +++++++-------- src/scalacli-sdkman/install.sh | 4 +- src/scalacli-sdkman/library_scripts.sh | 30 +++++++-------- 
src/scancode-toolkit/install.sh | 6 +-- src/scancode-toolkit/library_scripts.sh | 28 +++++++------- src/schemacrawler-sdkman/install.sh | 4 +- src/schemacrawler-sdkman/library_scripts.sh | 30 +++++++-------- src/sentinel-asdf/install.sh | 6 +-- src/sentinel-asdf/library_scripts.sh | 30 +++++++-------- src/serf-asdf/install.sh | 6 +-- src/serf-asdf/library_scripts.sh | 30 +++++++-------- src/shfmt/install.sh | 6 +-- src/shfmt/library_scripts.sh | 30 +++++++-------- src/shopify-cli/install.sh | 8 ++-- src/shopify-cli/library_scripts.sh | 28 +++++++------- src/sigstore-python/install.sh | 6 +-- src/sigstore-python/library_scripts.sh | 28 +++++++------- src/snyk-cli/install.sh | 6 +-- src/snyk-cli/library_scripts.sh | 28 +++++++------- src/sops/install.sh | 6 +-- src/sops/library_scripts.sh | 28 +++++++------- src/spacectl/install.sh | 6 +-- src/spacectl/library_scripts.sh | 28 +++++++------- src/spark-sdkman/install.sh | 4 +- src/spark-sdkman/library_scripts.sh | 30 +++++++-------- src/spicedb/install.sh | 6 +-- src/spicedb/library_scripts.sh | 28 +++++++------- src/springboot-sdkman/install.sh | 4 +- src/springboot-sdkman/library_scripts.sh | 30 +++++++-------- src/sqlfluff/install.sh | 6 +-- src/sqlfluff/library_scripts.sh | 28 +++++++------- src/squarespace-server/install.sh | 6 +-- src/squarespace-server/library_scripts.sh | 28 +++++++------- src/sshoogr-sdkman/install.sh | 4 +- src/sshoogr-sdkman/library_scripts.sh | 30 +++++++-------- src/starship-homebrew/install.sh | 4 +- src/starship-homebrew/library_scripts.sh | 30 +++++++-------- src/starship/install.sh | 6 +-- src/starship/library_scripts.sh | 28 +++++++------- src/statsd-exporter/install.sh | 6 +-- src/statsd-exporter/library_scripts.sh | 28 +++++++------- src/stew/install.sh | 6 +-- src/stew/library_scripts.sh | 28 +++++++------- src/supabase-cli/install.sh | 6 +-- src/supabase-cli/library_scripts.sh | 28 +++++++------- src/surge-cli/install.sh | 6 +-- src/surge-cli/library_scripts.sh | 28 +++++++------- src/sv2v/install.sh | 6 +-- src/sv2v/library_scripts.sh | 28 +++++++------- src/svu-asdf/install.sh | 6 +-- src/svu-asdf/library_scripts.sh | 30 +++++++-------- src/syft/install.sh | 6 +-- src/syft/library_scripts.sh | 28 +++++++------- src/syntaqx-serve/install.sh | 6 +-- src/syntaqx-serve/library_scripts.sh | 28 +++++++------- src/tailscale/install.sh | 8 ++-- src/tailscale/library_scripts.sh | 30 +++++++-------- src/taxi-sdkman/install.sh | 4 +- src/taxi-sdkman/library_scripts.sh | 30 +++++++-------- src/tea/install.sh | 6 +-- src/tea/library_scripts.sh | 28 +++++++------- src/tekton-cli/install.sh | 6 +-- src/tekton-cli/library_scripts.sh | 28 +++++++------- src/tempo/install.sh | 6 +-- src/tempo/library_scripts.sh | 28 +++++++------- src/temporal-cli/install.sh | 6 +-- src/temporal-cli/library_scripts.sh | 28 +++++++------- src/terracognita/install.sh | 6 +-- src/terracognita/library_scripts.sh | 28 +++++++------- src/terraform-asdf/install.sh | 6 +-- src/terraform-asdf/library_scripts.sh | 30 +++++++-------- src/terraform-docs/install.sh | 6 +-- src/terraform-docs/library_scripts.sh | 28 +++++++------- src/terraform-ls-asdf/install.sh | 6 +-- src/terraform-ls-asdf/library_scripts.sh | 30 +++++++-------- src/terraformer/install.sh | 6 +-- src/terraformer/library_scripts.sh | 28 +++++++------- src/terragrunt/install.sh | 6 +-- src/terragrunt/library_scripts.sh | 28 +++++++------- src/terramate/install.sh | 6 +-- src/terramate/library_scripts.sh | 28 +++++++------- src/tfc-agent-asdf/install.sh | 6 +-- 
src/tfc-agent-asdf/library_scripts.sh | 30 +++++++-------- src/tfcdk-cli/install.sh | 6 +-- src/tfcdk-cli/library_scripts.sh | 28 +++++++------- src/tfenv-homebrew/install.sh | 4 +- src/tfenv-homebrew/library_scripts.sh | 30 +++++++-------- src/tfsec/install.sh | 6 +-- src/tfsec/library_scripts.sh | 28 +++++++------- src/tfswitch/install.sh | 6 +-- src/tfswitch/library_scripts.sh | 30 +++++++-------- src/tldr/install.sh | 6 +-- src/tldr/library_scripts.sh | 28 +++++++------- src/tmate/install.sh | 4 +- src/tmate/library_scripts.sh | 30 +++++++-------- src/tmux-apt-get/install.sh | 4 +- src/tmux-apt-get/library_scripts.sh | 30 +++++++-------- src/tmux-homebrew/install.sh | 4 +- src/tmux-homebrew/library_scripts.sh | 30 +++++++-------- src/tomcat-sdkman/install.sh | 4 +- src/tomcat-sdkman/library_scripts.sh | 30 +++++++-------- src/tooljet-cli/install.sh | 6 +-- src/tooljet-cli/library_scripts.sh | 28 +++++++------- src/toolkit-sdkman/install.sh | 4 +- src/toolkit-sdkman/library_scripts.sh | 30 +++++++-------- src/tox/install.sh | 6 +-- src/tox/library_scripts.sh | 28 +++++++------- src/trello-cli/install.sh | 6 +-- src/trello-cli/library_scripts.sh | 28 +++++++------- src/tridentctl-asdf/install.sh | 6 +-- src/tridentctl-asdf/library_scripts.sh | 30 +++++++-------- src/truffle/install.sh | 6 +-- src/truffle/library_scripts.sh | 28 +++++++------- src/ts-node/README.md | 2 +- src/ts-node/install.sh | 6 +-- src/ts-node/library_scripts.sh | 28 +++++++------- src/tsx/install.sh | 6 +-- src/tsx/library_scripts.sh | 28 +++++++------- src/turborepo-npm/install.sh | 6 +-- src/turborepo-npm/library_scripts.sh | 28 +++++++------- src/twine/install.sh | 6 +-- src/twine/library_scripts.sh | 28 +++++++------- src/typescript/install.sh | 6 +-- src/typescript/library_scripts.sh | 28 +++++++------- src/typst/install.sh | 6 +-- src/typst/library_scripts.sh | 28 +++++++------- src/ufmt/library_scripts.sh | 30 +++++++-------- src/upx/install.sh | 6 +-- src/upx/library_scripts.sh | 28 +++++++------- src/vault-asdf/install.sh | 6 +-- src/vault-asdf/library_scripts.sh | 30 +++++++-------- src/vercel-cli/install.sh | 6 +-- src/vercel-cli/library_scripts.sh | 28 +++++++------- src/vercel-ncc/install.sh | 6 +-- src/vercel-ncc/library_scripts.sh | 28 +++++++------- src/vercel-pkg/install.sh | 6 +-- src/vercel-pkg/library_scripts.sh | 28 +++++++------- src/vercel-release/install.sh | 6 +-- src/vercel-release/library_scripts.sh | 28 +++++++------- src/vercel-serve/install.sh | 6 +-- src/vercel-serve/library_scripts.sh | 28 +++++++------- src/vscode-cli/library_scripts.sh | 30 +++++++-------- src/vscode-server/library_scripts.sh | 30 +++++++-------- src/vtop/install.sh | 6 +-- src/vtop/library_scripts.sh | 28 +++++++------- src/vue-cli/install.sh | 6 +-- src/vue-cli/library_scripts.sh | 28 +++++++------- src/vulture/install.sh | 6 +-- src/vulture/library_scripts.sh | 28 +++++++------- src/w3m-apt-get/install.sh | 4 +- src/w3m-apt-get/library_scripts.sh | 30 +++++++-------- src/w3m-homebrew/install.sh | 4 +- src/w3m-homebrew/library_scripts.sh | 30 +++++++-------- src/waypoint-asdf/install.sh | 6 +-- src/waypoint-asdf/library_scripts.sh | 30 +++++++-------- src/webtau-sdkman/install.sh | 4 +- src/webtau-sdkman/library_scripts.sh | 30 +++++++-------- src/wget-apt-get/install.sh | 4 +- src/wget-apt-get/library_scripts.sh | 30 +++++++-------- src/wget-homebrew/install.sh | 4 +- src/wget-homebrew/library_scripts.sh | 30 +++++++-------- src/wireguard-apt-get/install.sh | 4 +- src/wireguard-apt-get/library_scripts.sh | 30 
+++++++-------- src/xmrig/install.sh | 6 +-- src/xmrig/library_scripts.sh | 28 +++++++------- src/xonsh/install.sh | 6 +-- src/xonsh/library_scripts.sh | 28 +++++++------- src/yamllint/install.sh | 6 +-- src/yamllint/library_scripts.sh | 28 +++++++------- src/yapf/install.sh | 6 +-- src/yapf/library_scripts.sh | 28 +++++++------- src/youtube-dl/install.sh | 6 +-- src/youtube-dl/library_scripts.sh | 28 +++++++------- src/youtubeuploader/install.sh | 6 +-- src/youtubeuploader/library_scripts.sh | 28 +++++++------- src/yt-dlp/install.sh | 6 +-- src/yt-dlp/library_scripts.sh | 28 +++++++------- src/zsh-plugins/install.sh | 4 +- test/asdf-package/install_terraform_alpine.sh | 2 +- test/npm-package/scenarios.json | 4 +- 894 files changed, 7681 insertions(+), 7679 deletions(-) diff --git a/.github/actions/list-changed-features-action/action.yaml b/.github/actions/list-changed-features-action/action.yaml index 138004def..7629d5fae 100644 --- a/.github/actions/list-changed-features-action/action.yaml +++ b/.github/actions/list-changed-features-action/action.yaml @@ -30,16 +30,16 @@ runs: with: list-files: json filters: | - src: + src: - src/**/*.sh - test: + test: - test/**/*.sh - id: feature-finder shell: bash run: | echo '${{ steps.filter.outputs.changes }}' - + # prepare all features all_features=$(sudo tree -J -d ./src | sudo jq -c '.[0].contents | map(.name)') @@ -52,7 +52,7 @@ runs: jq -sc '.[0] + .[1] | unique' changed_src_features.json changed_test_features.json > changed_features.json changed_features=$(cat changed_features.json) - # outputs + # outputs echo "all_features=$all_features" >> $GITHUB_OUTPUT echo "changed_features=$changed_features" >> $GITHUB_OUTPUT diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index c169370a0..9460eb0a9 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -31,6 +31,6 @@ jobs: publish-features: "true" base-path-to-features: "./src" generate-docs: "false" - + env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 39beabd23..85cd6f639 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -29,11 +29,11 @@ on: - cron: '0 1 * * *' pull_request: - + push: branches: - main - + jobs: find-features: @@ -44,10 +44,10 @@ jobs: all-features: ${{ steps.list-features.outputs.all_features }} changed-features: ${{ steps.list-features.outputs.changed_features }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v3 - run: | echo ${{ inputs.enabled }} - echo ${{ github.event_name }} + echo ${{ github.event_name }} - id: list-features uses: ./.github/actions/list-changed-features-action @@ -68,14 +68,14 @@ jobs: elif [ ${{ github.event_name }} == 'push' ]; then echo 'features_to_test=${{ needs.find-features.outputs.changed-features }}' >> $GITHUB_OUTPUT - + elif [ ${{ github.event_name }} == 'workflow_dispatch' ]; then if [ ${{ inputs.on_changes_only }} == 'true' ]; then echo 'features_to_test=${{ needs.find-features.outputs.changed-features }}' >> $GITHUB_OUTPUT else echo 'features_to_test=${{ needs.find-features.outputs.all-features }}' >> $GITHUB_OUTPUT fi - + elif [ ${{ github.event_name }} == 'workflow_call' ]; then if [ ${{ inputs.on_changes_only }} == 'true' ]; then echo 'features_to_test=${{ needs.find-features.outputs.changed-features }}' >> $GITHUB_OUTPUT @@ -142,12 +142,12 @@ jobs: # install shellcheck comma_separated_features=${{ matrix.features }} - - for i in ${comma_separated_features//,/ } + + for 
i in ${comma_separated_features//,/ } do - shellcheck --severity=error -e SC2148 src/"$i"/*.sh + shellcheck --severity=error -e SC2148 src/"$i"/*.sh done - + test-global: runs-on: ubuntu-latest continue-on-error: true diff --git a/.vscode/settings.json b/.vscode/settings.json index 8c5b7ed6a..07fa08862 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,3 +1,4 @@ { "bashIde.explainshellEndpoint": "http://localhost:5000", + "diffEditor.ignoreTrimWhitespace": false } \ No newline at end of file diff --git a/README.md b/README.md index b0fb5ef75..275c9530c 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ 💻 Works with devcontainers \ ☁️ Works with GitHub Codespaces \ -👀 Don't see your feature here? [🔥Suggest a new feature!🔥](https://github.com/devcontainers-contrib/features/issues/new?template=suggest-feature.yaml) +👀 Don't see your feature here? [🔥Suggest a new feature!🔥](https://github.com/devcontainers-contrib/features/issues/new?template=suggest-feature.yaml) ## Usage diff --git a/archive/src/actions-runner-noexternals/install.sh b/archive/src/actions-runner-noexternals/install.sh index a4ad8a005..5345f00db 100755 --- a/archive/src/actions-runner-noexternals/install.sh +++ b/archive/src/actions-runner-noexternals/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers/features/dotnet:1.1.3" \ --option dotnetVersion="$DOTNETVERSION" --option runtimeOnly='true' - + $nanolayer_location \ @@ -24,7 +24,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='actions/runner' --option binaryNames='run.sh' --option binLocation='$_REMOTE_USER_HOME/.local/bin' --option version="$VERSION" --option libLocation='$_REMOTE_USER_HOME' --option libName='actions-runner' --option assetRegex='^(?!.*(noruntime))(?!.*(trimmedpackages))(.*-noexternals.*)' - + $nanolayer_location \ @@ -33,7 +33,7 @@ $nanolayer_location \ "ghcr.io/devcontainers-contrib/features/bash-command:1.0.0" \ --option command='rm /home/vscode/.local/bin/run.sh && \ chown -hR ${_REMOTE_USER}:${_REMOTE_USER} $_REMOTE_USER_HOME/actions-runner' - + echo "GitHub Actions Runner now installed at $_REMOTE_USER_HOME/actions-runner" diff --git a/archive/src/actions-runner-noexternals/library_scripts.sh b/archive/src/actions-runner-noexternals/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/actions-runner-noexternals/library_scripts.sh +++ b/archive/src/actions-runner-noexternals/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently 
wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/actions-runner-noruntime-noexternals/install.sh b/archive/src/actions-runner-noruntime-noexternals/install.sh index 2e05e49ee..60257ca11 100755 --- a/archive/src/actions-runner-noruntime-noexternals/install.sh +++ b/archive/src/actions-runner-noruntime-noexternals/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will 
download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers/features/dotnet:1.1.3" \ --option dotnetVersion="$DOTNETVERSION" --option runtimeOnly='true' - + $nanolayer_location \ @@ -24,7 +24,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='actions/runner' --option binaryNames='run.sh' --option binLocation='$_REMOTE_USER_HOME/.local/bin' --option version="$VERSION" --option libLocation='$_REMOTE_USER_HOME' --option libName='actions-runner' --option assetRegex='^(?!.*(trimmedpackages))(.*-noruntime.*)(.*-noexternals.*)' - + $nanolayer_location \ @@ -33,7 +33,7 @@ $nanolayer_location \ "ghcr.io/devcontainers-contrib/features/bash-command:1.0.0" \ --option command='rm /home/vscode/.local/bin/run.sh && \ chown -hR ${_REMOTE_USER}:${_REMOTE_USER} $_REMOTE_USER_HOME/actions-runner' - + echo "GitHub Actions Runner now installed at $_REMOTE_USER_HOME/actions-runner" diff --git a/archive/src/actions-runner-noruntime-noexternals/library_scripts.sh b/archive/src/actions-runner-noruntime-noexternals/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/actions-runner-noruntime-noexternals/library_scripts.sh +++ b/archive/src/actions-runner-noruntime-noexternals/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/actions-runner-noruntime/install.sh b/archive/src/actions-runner-noruntime/install.sh index 29e6c685d..d8cea5e1f 100755 --- a/archive/src/actions-runner-noruntime/install.sh +++ b/archive/src/actions-runner-noruntime/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ 
devcontainer-feature \ "ghcr.io/devcontainers/features/dotnet:1.1.3" \ --option dotnetVersion="$DOTNETVERSION" --option runtimeOnly='true' - + $nanolayer_location \ @@ -24,7 +24,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='actions/runner' --option binaryNames='run.sh' --option binLocation='$_REMOTE_USER_HOME/.local/bin' --option version="$VERSION" --option libLocation='$_REMOTE_USER_HOME' --option libName='actions-runner' --option assetRegex='^(?!.*(noexternals))(?!.*(trimmedpackages))(.*-noruntime.*)' - + $nanolayer_location \ @@ -33,7 +33,7 @@ $nanolayer_location \ "ghcr.io/devcontainers-contrib/features/bash-command:1.0.0" \ --option command='rm /home/vscode/.local/bin/run.sh && \ chown -hR ${_REMOTE_USER}:${_REMOTE_USER} $_REMOTE_USER_HOME/actions-runner' - + echo "GitHub Actions Runner now installed at $_REMOTE_USER_HOME/actions-runner" diff --git a/archive/src/actions-runner-noruntime/library_scripts.sh b/archive/src/actions-runner-noruntime/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/actions-runner-noruntime/library_scripts.sh +++ b/archive/src/actions-runner-noruntime/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If 
not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/age-keygen/install.sh b/archive/src/age-keygen/install.sh index 43c7a71c8..7cc46ae1e 100755 --- a/archive/src/age-keygen/install.sh +++ b/archive/src/age-keygen/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='filosottile/age' --option binaryNames='age-keygen' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/age-keygen/library_scripts.sh b/archive/src/age-keygen/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/age-keygen/library_scripts.sh +++ b/archive/src/age-keygen/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/age/install.sh b/archive/src/age/install.sh index 0e143987f..629d515c8 100755 --- a/archive/src/age/install.sh +++ b/archive/src/age/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ 
--option repo='filosottile/age' --option binaryNames='age' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/age/library_scripts.sh b/archive/src/age/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/age/library_scripts.sh +++ b/archive/src/age/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/airplane-cli/install.sh b/archive/src/airplane-cli/install.sh index 
43cc7239a..07983629a 100755 --- a/archive/src/airplane-cli/install.sh +++ b/archive/src/airplane-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='airplanedev/cli' --option binaryNames='airplane' --option version="$VERSION" --option assetRegex='.*(tar).*' - + echo 'Done!' diff --git a/archive/src/airplane-cli/library_scripts.sh b/archive/src/airplane-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/airplane-cli/library_scripts.sh +++ b/archive/src/airplane-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # 
If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/bitwarden-cli/install.sh b/archive/src/bitwarden-cli/install.sh index e72285a26..2bbae59e0 100755 --- a/archive/src/bitwarden-cli/install.sh +++ b/archive/src/bitwarden-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='bitwarden/clients' --option binaryNames='bw' --option releaseTagRegex='.*cli.*' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/bitwarden-cli/library_scripts.sh b/archive/src/bitwarden-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/bitwarden-cli/library_scripts.sh +++ b/archive/src/bitwarden-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/boundary-asdf/install.sh b/archive/src/boundary-asdf/install.sh index 83c619ec6..9fa1206aa 100755 --- a/archive/src/boundary-asdf/install.sh +++ b/archive/src/boundary-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='boundary' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/boundary-asdf/library_scripts.sh b/archive/src/boundary-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/archive/src/boundary-asdf/library_scripts.sh +++ b/archive/src/boundary-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for 
non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/btm/install.sh b/archive/src/btm/install.sh index ba3cc2eb4..eaa066a70 100755 --- a/archive/src/btm/install.sh +++ b/archive/src/btm/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='ClementTsang/bottom' --option binaryNames='btm' --option version="$VERSION" --option assetRegex='^(?!.*(2-17))' - + echo 'Done!' diff --git a/archive/src/btm/library_scripts.sh b/archive/src/btm/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/btm/library_scripts.sh +++ b/archive/src/btm/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/cert-manager/install.sh b/archive/src/cert-manager/install.sh index ebfe21f73..e6719e309 100755 --- a/archive/src/cert-manager/install.sh +++ b/archive/src/cert-manager/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='cert-manager/cert-manager' --option binaryNames='cmctl' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/cert-manager/library_scripts.sh b/archive/src/cert-manager/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/cert-manager/library_scripts.sh +++ b/archive/src/cert-manager/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: 
$(uname -m)" diff --git a/archive/src/cmctl-asdf/install.sh b/archive/src/cmctl-asdf/install.sh index 5cd49920f..0281472c6 100755 --- a/archive/src/cmctl-asdf/install.sh +++ b/archive/src/cmctl-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='cmctl' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/cmctl-asdf/library_scripts.sh b/archive/src/cmctl-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/archive/src/cmctl-asdf/library_scripts.sh +++ b/archive/src/cmctl-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ 
ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/codenotary-cas/install.sh b/archive/src/codenotary-cas/install.sh index b23395e11..1d132e019 100755 --- a/archive/src/codenotary-cas/install.sh +++ b/archive/src/codenotary-cas/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='codenotary/cas' --option binaryNames='cas' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/codenotary-cas/library_scripts.sh b/archive/src/codenotary-cas/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/codenotary-cas/library_scripts.sh +++ b/archive/src/codenotary-cas/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/croc/install.sh b/archive/src/croc/install.sh index 41660684a..6b6c53d8d 100755 --- a/archive/src/croc/install.sh +++ b/archive/src/croc/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ 
--option repo='schollz/croc' --option binaryNames='croc' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/croc/library_scripts.sh b/archive/src/croc/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/croc/library_scripts.sh +++ b/archive/src/croc/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/cue-asdf/install.sh b/archive/src/cue-asdf/install.sh index 
eb4a27c45..07447d916 100755 --- a/archive/src/cue-asdf/install.sh +++ b/archive/src/cue-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='cue' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/cue-asdf/library_scripts.sh b/archive/src/cue-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/archive/src/cue-asdf/library_scripts.sh +++ b/archive/src/cue-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not 
previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/edge-impulse-cli/install.sh b/archive/src/edge-impulse-cli/install.sh index 0ac05350e..b33cf282a 100755 --- a/archive/src/edge-impulse-cli/install.sh +++ b/archive/src/edge-impulse-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='edge-impulse-cli' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/edge-impulse-cli/library_scripts.sh b/archive/src/edge-impulse-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/edge-impulse-cli/library_scripts.sh +++ b/archive/src/edge-impulse-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/elixir-asdf/install.sh b/archive/src/elixir-asdf/install.sh index 1914c51d6..fabd5c3c6 100755 --- a/archive/src/elixir-asdf/install.sh +++ b/archive/src/elixir-asdf/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/apt-get-packages:1.0.6" \ --option packages='build-essential,autoconf,m4,libncurses5-dev,libwxgtk3.*-dev,libwxgtk-webview3.*-dev,libgl1-mesa-dev,libglu1-mesa-dev,libpng-dev,libssh-dev,unixodbc-dev,xsltproc,fop,libxml2-utils,libncurses-dev,openjdk-1*-jdk,procps' - + $nanolayer_location \ @@ -24,7 +24,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.8" \ --option plugin='erlang' --option version="$ERLANGVERSION" - + $nanolayer_location \ @@ -32,7 +32,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.8" \ --option plugin='elixir' --option version="$ELIXIRVERSION" - + echo 'Done!' diff --git a/archive/src/elixir-asdf/library_scripts.sh b/archive/src/elixir-asdf/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/elixir-asdf/library_scripts.sh +++ b/archive/src/elixir-asdf/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ 
ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/erlang-asdf/install.sh b/archive/src/erlang-asdf/install.sh index afc941415..5e6af376c 100755 --- a/archive/src/erlang-asdf/install.sh +++ b/archive/src/erlang-asdf/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/apt-get-packages:1.0.6" \ --option packages='build-essential,autoconf,m4,libncurses5-dev,libwxgtk3.*-dev,libwxgtk-webview3.*-dev,libgl1-mesa-dev,libglu1-mesa-dev,libpng-dev,libssh-dev,unixodbc-dev,xsltproc,fop,libxml2-utils,libncurses-dev,openjdk-1*-jdk,procps' - + $nanolayer_location \ @@ -24,7 +24,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.8" \ --option plugin='erlang' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/erlang-asdf/library_scripts.sh b/archive/src/erlang-asdf/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/erlang-asdf/library_scripts.sh +++ b/archive/src/erlang-asdf/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/ffmpeg-homebrew/install.sh b/archive/src/ffmpeg-homebrew/install.sh index 5b6fd053b..345a84042 100755 --- a/archive/src/ffmpeg-homebrew/install.sh +++ b/archive/src/ffmpeg-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git 
a/archive/src/ffmpeg-homebrew/library_scripts.sh b/archive/src/ffmpeg-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/archive/src/ffmpeg-homebrew/library_scripts.sh +++ b/archive/src/ffmpeg-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/gh-release/install.sh b/archive/src/gh-release/install.sh index 
172cc52fb..75cfa7c8a 100755 --- a/archive/src/gh-release/install.sh +++ b/archive/src/gh-release/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.6" diff --git a/archive/src/gh-release/library_scripts.sh b/archive/src/gh-release/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/gh-release/library_scripts.sh +++ b/archive/src/gh-release/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap 
clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/homebrew-package/install.sh b/archive/src/homebrew-package/install.sh index 100d52275..08bbddc55 100755 --- a/archive/src/homebrew-package/install.sh +++ b/archive/src/homebrew-package/install.sh @@ -13,7 +13,7 @@ if [ -z "$PACKAGE" ]; then fi if [ "$(id -u)" -ne 0 ]; then - echo -e 'Script must be run as + echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.' exit 1 fi @@ -31,7 +31,7 @@ check_packages() { ensure_curl () { if ! type curl >/dev/null 2>&1; then apt-get update -y && apt-get -y install --no-install-recommends curl ca-certificates - fi + fi } @@ -44,11 +44,11 @@ install_via_homebrew() { # install Homebrew if does not exists if ! type brew >/dev/null 2>&1; then echo "Installing Homebrew..." - + # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer - # `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, - # and if missing - will download a temporary copy that automatically get deleted at the end + # `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, + # and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" @@ -59,16 +59,16 @@ install_via_homebrew() { --option shallow_clone='true' --option update="true" source /etc/profile.d/nanolayer-homebrew.sh fi - + if [ "$version" = "latest" ]; then package_full="$package" else package_full="${package}@${version}" fi - # Solves CVE-2022-24767 mitigation in Git >2.35.2 + # Solves CVE-2022-24767 mitigation in Git >2.35.2 # For more information: https://github.blog/2022-04-12-git-security-vulnerability-announced/ - git config --system --add safe.directory "$(brew --prefix)/Homebrew/Library/Taps/homebrew/homebrew-core" + git config --system --add safe.directory "$(brew --prefix)/Homebrew/Library/Taps/homebrew/homebrew-core" su - "$_REMOTE_USER" </dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean 
download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/ko/install.sh b/archive/src/ko/install.sh index 5d81c7fc7..633c0d564 100755 --- a/archive/src/ko/install.sh +++ b/archive/src/ko/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='ko-build/ko' --option binaryNames='ko' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/ko/library_scripts.sh b/archive/src/ko/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/ko/library_scripts.sh +++ b/archive/src/ko/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/kubescape/install.sh b/archive/src/kubescape/install.sh index d6d89e87c..8dbd68bbd 100755 --- a/archive/src/kubescape/install.sh +++ b/archive/src/kubescape/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='kubescape/kubescape' --option binaryNames='kubescape' --option version="$VERSION" --option assetRegex='.*(tar).*' - + echo 'Done!' diff --git a/archive/src/kubescape/library_scripts.sh b/archive/src/kubescape/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/kubescape/library_scripts.sh +++ b/archive/src/kubescape/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux 
architectures yet: $(uname -m)" diff --git a/archive/src/mage/install.sh b/archive/src/mage/install.sh index f7b46d00a..54fe2aedc 100755 --- a/archive/src/mage/install.sh +++ b/archive/src/mage/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='magefile/mage' --option binaryNames='mage' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/mage/library_scripts.sh b/archive/src/mage/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/mage/library_scripts.sh +++ b/archive/src/mage/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the 
end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/meson-asdf/install.sh b/archive/src/meson-asdf/install.sh index c72fcf934..37bd49c17 100755 --- a/archive/src/meson-asdf/install.sh +++ b/archive/src/meson-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='meson' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/meson-asdf/library_scripts.sh b/archive/src/meson-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/archive/src/meson-asdf/library_scripts.sh +++ b/archive/src/meson-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/meteor-cli/install.sh b/archive/src/meteor-cli/install.sh index 1f7d47c1e..469495b30 100755 --- a/archive/src/meteor-cli/install.sh +++ b/archive/src/meteor-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ 
$nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='meteor' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/meteor-cli/library_scripts.sh b/archive/src/meteor-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/meteor-cli/library_scripts.sh +++ b/archive/src/meteor-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname 
-m)" diff --git a/archive/src/nushell/install.sh b/archive/src/nushell/install.sh index 7ed3666bf..27cdee014 100755 --- a/archive/src/nushell/install.sh +++ b/archive/src/nushell/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='nushell/nushell' --option binaryNames='nu' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/nushell/library_scripts.sh b/archive/src/nushell/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/nushell/library_scripts.sh +++ b/archive/src/nushell/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the 
end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/ory-keto/install.sh b/archive/src/ory-keto/install.sh index b836d8e76..a89736e03 100755 --- a/archive/src/ory-keto/install.sh +++ b/archive/src/ory-keto/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='ory/keto' --option binaryNames='keto' --option version="$VERSION" --option assetRegex='.*(Linux_64).*' - + echo 'Done!' diff --git a/archive/src/ory-keto/library_scripts.sh b/archive/src/ory-keto/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/ory-keto/library_scripts.sh +++ b/archive/src/ory-keto/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/pip-audit/install.sh b/archive/src/pip-audit/install.sh index 09e85e89c..5309d1c6b 100755 --- a/archive/src/pip-audit/install.sh +++ b/archive/src/pip-audit/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='pip-audit' --option version="$VERSION" - + echo 'Done' diff --git a/archive/src/pip-audit/library_scripts.sh b/archive/src/pip-audit/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/pip-audit/library_scripts.sh +++ b/archive/src/pip-audit/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/porter/install.sh 
b/archive/src/porter/install.sh index 21215bfcf..bf9de62de 100755 --- a/archive/src/porter/install.sh +++ b/archive/src/porter/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" @@ -22,7 +22,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/bash-command:1.0.0" \ --option command='curl -L https://cdn.porter.sh/latest/install-linux.sh | PORTER_VERSION=$VERSION PORTER_HOME=/usr/local/porter bash -s -- -x' - + if [ -n "$TERRAFORMMIXINVERSION" ] ; then $nanolayer_location \ @@ -39,7 +39,7 @@ if [ -n "$AZMIXINVERSION" ] ; then devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/bash-command:1.0.0" \ --option command='/usr/local/porter/porter mixin install az --version $AZMIXINVERSION' -fi +fi if [ -n "$AWSMIXINVERSION" ] ; then $nanolayer_location \ @@ -47,7 +47,7 @@ if [ -n "$AWSMIXINVERSION" ] ; then devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/bash-command:1.0.0" \ --option command='/usr/local/porter/porter mixin install aws --version $AWSMIXINVERSION' -fi +fi if [ -n "$DOCKERMIXINVERSION" ] ; then $nanolayer_location \ @@ -55,7 +55,7 @@ if [ -n "$DOCKERMIXINVERSION" ] ; then devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/bash-command:1.0.0" \ --option command='/usr/local/porter/porter mixin install docker --version $DOCKERMIXINVERSION' -fi +fi if [ -n "$DOCKERCOMPOSEMIXINVERSION" ] ; then $nanolayer_location \ @@ -63,7 +63,7 @@ if [ -n "$DOCKERCOMPOSEMIXINVERSION" ] ; then devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/bash-command:1.0.0" \ --option command='/usr/local/porter/porter mixin install docker-compose --version $DOCKERCOMPOSEMIXINVERSION' -fi +fi if [ -n "$GCLOUDMIXINVERSION" ] ; then $nanolayer_location \ @@ -71,7 +71,7 @@ if [ -n "$GCLOUDMIXINVERSION" ] ; then devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/bash-command:1.0.0" \ --option command='/usr/local/porter/porter mixin install gcloud --version $GCLOUDMIXINVERSION' -fi +fi if [ -n "$HELMMIXINVERSION" ] ; then $nanolayer_location \ @@ -79,7 +79,7 @@ if [ -n "$HELMMIXINVERSION" ] ; then devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/bash-command:1.0.0" \ --option command='/usr/local/porter/porter mixin install helm --version $HELMMIXINVERSION' -fi +fi if [ -n "$ARMMIXINVERSION" ] ; then $nanolayer_location \ @@ -87,7 +87,7 @@ if [ -n "$ARMMIXINVERSION" ] ; then devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/bash-command:1.0.0" \ --option command='/usr/local/porter/porter mixin install arm --version $ARMMIXINVERSION' -fi +fi if [ -n "$AZUREPLUGINVERSION" ] ; then $nanolayer_location \ @@ -95,7 +95,7 @@ if [ -n "$AZUREPLUGINVERSION" ] ; then devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/bash-command:1.0.0" \ --option command='/usr/local/porter/porter plugin install azure --version $AZUREPLUGINVERSION' -fi +fi if [ -n "$KUBERNETESPLUGINVERSION" ] ; then $nanolayer_location \ @@ -103,7 
+103,7 @@ if [ -n "$KUBERNETESPLUGINVERSION" ] ; then devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/bash-command:1.0.0" \ --option command='/usr/local/porter/porter plugin install kubernetes --version $KUBERNETESPLUGINVERSION' -fi +fi $nanolayer_location \ install \ diff --git a/archive/src/porter/library_scripts.sh b/archive/src/porter/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/archive/src/porter/library_scripts.sh +++ b/archive/src/porter/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename 
-C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/postgres-asdf/install.sh b/archive/src/postgres-asdf/install.sh index 5d37b1f80..55613756e 100755 --- a/archive/src/postgres-asdf/install.sh +++ b/archive/src/postgres-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/apt-get-packages:1.0.4" \ --option packages='build-essential,libssl-dev,libreadline-dev,zlib1g-dev,libcurl4-openssl-dev,uuid-dev,icu-devtools' - + $nanolayer_location \ @@ -25,7 +25,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='postgres' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/postgres-asdf/library_scripts.sh b/archive/src/postgres-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/archive/src/postgres-asdf/library_scripts.sh +++ b/archive/src/postgres-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/rabbitmq-asdf/install.sh b/archive/src/rabbitmq-asdf/install.sh index 7b7b09318..245d63cc0 100755 --- a/archive/src/rabbitmq-asdf/install.sh +++ b/archive/src/rabbitmq-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location 
"v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/apt-get-packages:1.0.4" \ --option packages='build-essential,autoconf,m4,libncurses5-dev,libwxgtk3.0-gtk3-dev,libwxgtk-webview3.0-gtk3-dev,libgl1-mesa-dev,libglu1-mesa-dev,libpng-dev,libssh-dev,unixodbc-dev,xsltproc,fop,libxml2-utils,libncurses-dev,openjdk-11-jdk,procps' - + $nanolayer_location \ @@ -25,7 +25,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.8" \ --option plugin='erlang' --option version="$ERLANGVERSION" - + $nanolayer_location \ @@ -33,7 +33,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/apt-get-packages:1.0.4" \ --option packages='xz-utils' - + $nanolayer_location \ @@ -41,7 +41,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.8" \ --option plugin='rabbitmq' --option version="$VERSION" --option latestVersionPattern='v' - + echo 'Done!' diff --git a/archive/src/rabbitmq-asdf/library_scripts.sh b/archive/src/rabbitmq-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/archive/src/rabbitmq-asdf/library_scripts.sh +++ b/archive/src/rabbitmq-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/scala-asdf/install.sh b/archive/src/scala-asdf/install.sh index edd53a307..69cbe5e2e 100755 --- a/archive/src/scala-asdf/install.sh +++ b/archive/src/scala-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 
+17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/apt-get-packages:1.0.4" \ --option packages='openjdk-11-jdk' - + $nanolayer_location \ @@ -25,7 +25,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='scala' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/scala-asdf/library_scripts.sh b/archive/src/scala-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/archive/src/scala-asdf/library_scripts.sh +++ b/archive/src/scala-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download 
https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/serverless/install.sh b/archive/src/serverless/install.sh index 301817a51..71b74d118 100755 --- a/archive/src/serverless/install.sh +++ b/archive/src/serverless/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='serverless' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/serverless/library_scripts.sh b/archive/src/serverless/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/serverless/library_scripts.sh +++ b/archive/src/serverless/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/sqlfmt/install.sh b/archive/src/sqlfmt/install.sh index 064fcd8bf..600cc140c 100755 --- a/archive/src/sqlfmt/install.sh +++ b/archive/src/sqlfmt/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='shandy-sqlfmt[jinjafmt]' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/sqlfmt/library_scripts.sh b/archive/src/sqlfmt/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/sqlfmt/library_scripts.sh +++ b/archive/src/sqlfmt/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git 
a/archive/src/syncthing/install.sh b/archive/src/syncthing/install.sh index b0646983d..b2cde39d6 100755 --- a/archive/src/syncthing/install.sh +++ b/archive/src/syncthing/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='syncthing/syncthing' --option binaryNames='syncthing' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/syncthing/library_scripts.sh b/archive/src/syncthing/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/syncthing/library_scripts.sh +++ b/archive/src/syncthing/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and 
delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/trivy/install.sh b/archive/src/trivy/install.sh index 8c8077434..3cf8445f3 100755 --- a/archive/src/trivy/install.sh +++ b/archive/src/trivy/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='aquasecurity/trivy' --option binaryNames='trivy' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/trivy/library_scripts.sh b/archive/src/trivy/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/trivy/library_scripts.sh +++ b/archive/src/trivy/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/vertx-sdkman/install.sh b/archive/src/vertx-sdkman/install.sh index 1986fb504..ce3b485eb 100755 --- a/archive/src/vertx-sdkman/install.sh +++ b/archive/src/vertx-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/archive/src/vertx-sdkman/library_scripts.sh 
b/archive/src/vertx-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/archive/src/vertx-sdkman/library_scripts.sh +++ b/archive/src/vertx-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/visualvm-sdkman/install.sh b/archive/src/visualvm-sdkman/install.sh index 2384fce4d..350d4560d 100755 --- 
a/archive/src/visualvm-sdkman/install.sh +++ b/archive/src/visualvm-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/archive/src/visualvm-sdkman/library_scripts.sh b/archive/src/visualvm-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/archive/src/visualvm-sdkman/library_scripts.sh +++ b/archive/src/visualvm-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ 
"$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/volta/install.sh b/archive/src/volta/install.sh index 7bcce32d6..b8c5a4b1d 100755 --- a/archive/src/volta/install.sh +++ b/archive/src/volta/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='volta-cli/volta' --option binaryNames='volta' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/volta/library_scripts.sh b/archive/src/volta/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/volta/library_scripts.sh +++ b/archive/src/volta/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/archive/src/xplr/install.sh b/archive/src/xplr/install.sh index badadc0c4..b54c3e7b5 100755 --- a/archive/src/xplr/install.sh +++ b/archive/src/xplr/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ 
--option repo='sayanarijit/xplr' --option binaryNames='xplr' --option version="$VERSION" - + echo 'Done!' diff --git a/archive/src/xplr/library_scripts.sh b/archive/src/xplr/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/archive/src/xplr/library_scripts.sh +++ b/archive/src/xplr/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/act-asdf/install.sh b/src/act-asdf/install.sh index 45bdd023c..97236a196 
100755 --- a/src/act-asdf/install.sh +++ b/src/act-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='act' --option version="$VERSION" - + echo 'Done!' diff --git a/src/act-asdf/library_scripts.sh b/src/act-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/act-asdf/library_scripts.sh +++ b/src/act-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at 
the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/act/install.sh b/src/act/install.sh index 493e01113..2efe9a767 100755 --- a/src/act/install.sh +++ b/src/act/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='nektos/act' --option binaryNames='act' --option version="$VERSION" - + echo 'Done!' diff --git a/src/act/library_scripts.sh b/src/act/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/act/library_scripts.sh +++ b/src/act/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/actionlint/install.sh b/src/actionlint/install.sh index ced44f716..bda5979b0 100755 --- a/src/actionlint/install.sh +++ b/src/actionlint/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ 
--option repo='rhysd/actionlint' --option binaryNames='actionlint' --option version="$VERSION" - + echo 'Done!' diff --git a/src/actionlint/library_scripts.sh b/src/actionlint/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/actionlint/library_scripts.sh +++ b/src/actionlint/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/actions-runner/install.sh b/src/actions-runner/install.sh index 
780501321..779963b90 100755 --- a/src/actions-runner/install.sh +++ b/src/actions-runner/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers/features/dotnet:1.1.3" \ --option dotnetVersion="$DOTNETVERSION" --option runtimeOnly='true' - + $nanolayer_location \ @@ -24,7 +24,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='actions/runner' --option binaryNames='run.sh' --option binLocation='$_REMOTE_USER_HOME/.local/bin' --option version="$VERSION" --option libLocation='$_REMOTE_USER_HOME' --option libName='actions-runner' --option assetRegex='^(?!.*(noruntime))(?!.*(trimmedpackages))(?!.*(noexternals))' - + $nanolayer_location \ @@ -33,7 +33,7 @@ $nanolayer_location \ "ghcr.io/devcontainers-contrib/features/bash-command:1.0.0" \ --option command='rm /home/vscode/.local/bin/run.sh && \ chown -hR ${_REMOTE_USER}:${_REMOTE_USER} $_REMOTE_USER_HOME/actions-runner' - + echo "GitHub Actions Runner now installed at $_REMOTE_USER_HOME/actions-runner" diff --git a/src/actions-runner/library_scripts.sh b/src/actions-runner/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/actions-runner/library_scripts.sh +++ b/src/actions-runner/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/activemq-sdkman/install.sh b/src/activemq-sdkman/install.sh index 3764b1559..5fecb6c66 100755 --- a/src/activemq-sdkman/install.sh +++ b/src/activemq-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/activemq-sdkman/library_scripts.sh 
b/src/activemq-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/activemq-sdkman/library_scripts.sh +++ b/src/activemq-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/akamai-cli/install.sh b/src/akamai-cli/install.sh index d4395b973..ebbc1686c 100755 --- a/src/akamai-cli/install.sh +++ b/src/akamai-cli/install.sh 
@@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='akamai/cli' --option binaryNames='akamai' --option version="$VERSION" - + echo 'Done!' diff --git a/src/akamai-cli/library_scripts.sh b/src/akamai-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/akamai-cli/library_scripts.sh +++ b/src/akamai-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname 
-sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/alertmanager/install.sh b/src/alertmanager/install.sh index eafc12112..f0e0ee0f6 100755 --- a/src/alertmanager/install.sh +++ b/src/alertmanager/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='prometheus/alertmanager' --option binaryNames='alertmanager' --option version="$VERSION" - + echo 'Done!' diff --git a/src/alertmanager/library_scripts.sh b/src/alertmanager/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/alertmanager/library_scripts.sh +++ b/src/alertmanager/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/alp-asdf/install.sh b/src/alp-asdf/install.sh index 5afc08acc..a0e2e5654 100755 --- a/src/alp-asdf/install.sh +++ b/src/alp-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='alp' --option version="$VERSION" - + echo 'Done!' diff --git a/src/alp-asdf/library_scripts.sh b/src/alp-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/alp-asdf/library_scripts.sh +++ b/src/alp-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git 
a/src/amplify-cli/install.sh b/src/amplify-cli/install.sh index 7a8067587..61c9aaf5f 100755 --- a/src/amplify-cli/install.sh +++ b/src/amplify-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='@aws-amplify/cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/amplify-cli/library_scripts.sh b/src/amplify-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/amplify-cli/library_scripts.sh +++ b/src/amplify-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it 
temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/angular-cli/install.sh b/src/angular-cli/install.sh index 03df44af8..a23eea5b5 100755 --- a/src/angular-cli/install.sh +++ b/src/angular-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='@angular/cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/angular-cli/library_scripts.sh b/src/angular-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/angular-cli/library_scripts.sh +++ b/src/angular-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ansible/library_scripts.sh b/src/ansible/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/ansible/library_scripts.sh +++ b/src/ansible/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ant-sdkman/install.sh b/src/ant-sdkman/install.sh index cdf4a76ea..c4c032706 100755 --- a/src/ant-sdkman/install.sh +++ b/src/ant-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/ant-sdkman/library_scripts.sh b/src/ant-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 
100644 --- a/src/ant-sdkman/library_scripts.sh +++ b/src/ant-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/apko/install.sh b/src/apko/install.sh index a5c7a81a9..a3b0642ab 100755 --- a/src/apko/install.sh +++ b/src/apko/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # 
source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='chainguard-dev/apko' --option binaryNames='apko' --option version="$VERSION" - + echo 'Done!' diff --git a/src/apko/library_scripts.sh b/src/apko/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/apko/library_scripts.sh +++ b/src/apko/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; 
then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/apt-get-packages/install.sh b/src/apt-get-packages/install.sh index 2f5f69912..8d9370175 100755 --- a/src/apt-get-packages/install.sh +++ b/src/apt-get-packages/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.46" @@ -24,5 +24,5 @@ $nanolayer_location install apt-get "$PACKAGES" \ $force_ppas_cmd \ $clean_ppas_cmd \ $preserve_apt_list_cmd \ - + diff --git a/src/apt-get-packages/library_scripts.sh b/src/apt-get-packages/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/apt-get-packages/library_scripts.sh +++ b/src/apt-get-packages/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/apt-packages/install.sh b/src/apt-packages/install.sh index 7b025b0da..4d5650caf 100755 --- a/src/apt-packages/install.sh +++ b/src/apt-packages/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.46" @@ -24,5 +24,5 @@ 
$nanolayer_location install apt "$PACKAGES" \ $force_ppas_cmd \ $clean_ppas_cmd \ $preserve_apt_list_cmd \ - + diff --git a/src/apt-packages/library_scripts.sh b/src/apt-packages/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/apt-packages/library_scripts.sh +++ b/src/apt-packages/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git 
a/src/argo-cd/install.sh b/src/argo-cd/install.sh
index b2ac753d8..59af15f26 100755
--- a/src/argo-cd/install.sh
+++ b/src/argo-cd/install.sh
@@ -70,7 +70,7 @@ find_version_from_git_tags() {
 }
 # make sure we have curl
-check_packages ca-certificates curl
+check_packages ca-certificates curl
 # make sure version is available
 find_version_from_git_tags ARGOCD_VERSION 'https://github.com/argoproj/argo-cd'
diff --git a/src/asciidoctorj-sdkman/install.sh b/src/asciidoctorj-sdkman/install.sh
index 50fb1a26b..222c6ca28 100755
--- a/src/asciidoctorj-sdkman/install.sh
+++ b/src/asciidoctorj-sdkman/install.sh
@@ -6,8 +6,8 @@ source ./library_scripts.sh
 # nanolayer is a cli utility which keeps container layers as small as possible
 # source code: https://github.com/devcontainers-contrib/nanolayer
-# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations,
-# and if missing - will download a temporary copy that automatically get deleted at the end
+# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations,
+# and if missing - will download a temporary copy that automatically get deleted at the end
 # of the script
 ensure_nanolayer nanolayer_location "v0.4.29"
diff --git a/src/asciidoctorj-sdkman/library_scripts.sh b/src/asciidoctorj-sdkman/library_scripts.sh
index 8f9bd9a0e..5384c58d6 100644
--- a/src/asciidoctorj-sdkman/library_scripts.sh
+++ b/src/asciidoctorj-sdkman/library_scripts.sh
@@ -3,14 +3,14 @@
 clean_download() {
 # The purpose of this function is to download a file with minimal impact on container layer size
- # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a
- # temporary manner, and making sure to
+ # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a
+ # temporary manner, and making sure to
 # 1. uninstall the downloader at the return of the function
 # 2.
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/asdf-package/install.sh b/src/asdf-package/install.sh index 7d5f22011..a3dc63ff1 100755 --- a/src/asdf-package/install.sh +++ b/src/asdf-package/install.sh @@ -62,7 +62,7 @@ install_via_asdf() { elif cat /etc/os-release | grep "ID_LIKE=.*debian.*\|ID=.*debian.*"; then check_packages curl git ca-certificates fi - + # asdf may be installed somewhere on the machine, but we need it to be accessible to the remote user # the code bellow will return 2 only when asdf is available, and 1 otherwise @@ -75,7 +75,7 @@ install_via_asdf() { EOF exit_code=$? 
set -e - + if [ "${exit_code}" -eq 2 ]; then # asdf already available to remote user, use it su - "$_REMOTE_USER" </dev/null 2>&1; then echo "$PLUGIN already exists - skipping adding it" else - asdf plugin add "$PLUGIN" "$REPO" + asdf plugin add "$PLUGIN" "$REPO" fi if [ "${VERSION}" = "latest" ] ; then @@ -112,15 +112,15 @@ EOF if asdf list "$PLUGIN" >/dev/null 2>&1; then echo "$PLUGIN already exists - skipping adding it" else - asdf plugin add "$PLUGIN" "$REPO" + asdf plugin add "$PLUGIN" "$REPO" fi - + EOF - # I resolve the version like this because in bash resolving + # I resolve the version like this because in bash resolving # a subshell take prevedent to su, so we must resolve variables - # pre using them in final su clause. + # pre using them in final su clause. # I hate bash. resolved_version=$(su - "$_REMOTE_USER" < /dev/null 2>&1 @@ -131,7 +131,7 @@ EOF echo $VERSION fi EOF -) +) su - "$_REMOTE_USER" </dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/atlantis/install.sh b/src/atlantis/install.sh index 1ffebae3c..99b83f9b5 100755 --- a/src/atlantis/install.sh +++ b/src/atlantis/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='runatlantis/atlantis' --option binaryNames='atlantis' --option version="$VERSION" - + echo 'Done!' 
diff --git a/src/atlantis/library_scripts.sh b/src/atlantis/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/atlantis/library_scripts.sh +++ b/src/atlantis/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/atmos/install.sh b/src/atmos/install.sh index a5a502b5c..19b672134 100755 --- a/src/atmos/install.sh +++ b/src/atmos/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps 
container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='cloudposse/atmos' --option binaryNames='atmos' --option version="$VERSION" - + echo 'Done!' diff --git a/src/atmos/library_scripts.sh b/src/atmos/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/atmos/library_scripts.sh +++ b/src/atmos/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap 
clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/auditjs/install.sh b/src/auditjs/install.sh index 0babae28a..d13c51f98 100755 --- a/src/auditjs/install.sh +++ b/src/auditjs/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='auditjs' --option version="$VERSION" - + echo 'Done' diff --git a/src/auditjs/library_scripts.sh b/src/auditjs/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/auditjs/library_scripts.sh +++ b/src/auditjs/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/autoenv/install.sh b/src/autoenv/install.sh index 3c9bfb8a7..9f8484101 100755 --- a/src/autoenv/install.sh +++ b/src/autoenv/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option 
package='@hyperupcall/autoenv' --option version="$VERSION" - + echo 'In order to enable autoenv , execute `source $(npm root -g)/@hyperupcall/autoenv/activate.sh` in your shell' diff --git a/src/autoenv/library_scripts.sh b/src/autoenv/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/autoenv/library_scripts.sh +++ b/src/autoenv/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/aws-cdk/install.sh 
b/src/aws-cdk/install.sh index fa1057476..ad332dd54 100755 --- a/src/aws-cdk/install.sh +++ b/src/aws-cdk/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='aws-cdk' --option version="$VERSION" - + echo 'Done!' diff --git a/src/aws-cdk/library_scripts.sh b/src/aws-cdk/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/aws-cdk/library_scripts.sh +++ b/src/aws-cdk/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z 
"${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/aws-eb-cli/install.sh b/src/aws-eb-cli/install.sh index 6869eb6f9..598ce4eed 100755 --- a/src/aws-eb-cli/install.sh +++ b/src/aws-eb-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='awsebcli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/aws-eb-cli/library_scripts.sh b/src/aws-eb-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/aws-eb-cli/library_scripts.sh +++ b/src/aws-eb-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/aztfexport/install.sh b/src/aztfexport/install.sh index 2e7fad9b9..66b24dd4e 100755 --- a/src/aztfexport/install.sh +++ b/src/aztfexport/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ 
--option repo='Azure/aztfexport' --option binaryNames='aztfexport' --option version="$VERSION" - + echo 'Done!' diff --git a/src/aztfexport/library_scripts.sh b/src/aztfexport/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/aztfexport/library_scripts.sh +++ b/src/aztfexport/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/azure-apiops/library_scripts.sh b/src/azure-apiops/library_scripts.sh 
index 0d7f34d4b..ed393e86f 100644 --- a/src/azure-apiops/library_scripts.sh +++ b/src/azure-apiops/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ballerina-sdkman/install.sh b/src/ballerina-sdkman/install.sh index 9d0c027eb..5ffe763b4 100755 --- a/src/ballerina-sdkman/install.sh +++ b/src/ballerina-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps 
container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/ballerina-sdkman/library_scripts.sh b/src/ballerina-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/ballerina-sdkman/library_scripts.sh +++ b/src/ballerina-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # 
clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/bandit/install.sh b/src/bandit/install.sh index 0be57149e..b9f7f820d 100755 --- a/src/bandit/install.sh +++ b/src/bandit/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='bandit' --option version="$VERSION" - + echo 'Done!' diff --git a/src/bandit/library_scripts.sh b/src/bandit/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/bandit/library_scripts.sh +++ b/src/bandit/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/bartib/install.sh b/src/bartib/install.sh index ed643d6bf..eeb9d2d01 100755 --- a/src/bartib/install.sh +++ b/src/bartib/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option 
repo='nikolassv/bartib' --option binaryNames='bartib' --option version="$VERSION" - + echo 'Done!' diff --git a/src/bartib/library_scripts.sh b/src/bartib/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/bartib/library_scripts.sh +++ b/src/bartib/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/beehive/install.sh b/src/beehive/install.sh index 0008a1856..27a7d1370 100755 --- 
a/src/beehive/install.sh +++ b/src/beehive/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='muesli/beehive' --option binaryNames='beehive' --option version="$VERSION" - + echo 'Done!' diff --git a/src/beehive/library_scripts.sh b/src/beehive/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/beehive/library_scripts.sh +++ b/src/beehive/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ 
"$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/bigcommerce-stencil-cli/install.sh b/src/bigcommerce-stencil-cli/install.sh index c1b289456..6e7fea88d 100755 --- a/src/bigcommerce-stencil-cli/install.sh +++ b/src/bigcommerce-stencil-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='@bigcommerce/stencil-cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/bigcommerce-stencil-cli/library_scripts.sh b/src/bigcommerce-stencil-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/bigcommerce-stencil-cli/library_scripts.sh +++ b/src/bigcommerce-stencil-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/bikeshed/install.sh b/src/bikeshed/install.sh index 79b8c0350..f2027eff3 100755 --- a/src/bikeshed/install.sh +++ b/src/bikeshed/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option 
package='bikeshed' --option version="$VERSION" - + echo 'Done!' diff --git a/src/bikeshed/library_scripts.sh b/src/bikeshed/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/bikeshed/library_scripts.sh +++ b/src/bikeshed/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/bin/install.sh b/src/bin/install.sh index c6aaf270b..ef3ddae4e 100755 --- a/src/bin/install.sh +++ b/src/bin/install.sh @@ -5,8 
+5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='marcosnils/bin' --option binaryNames='bin' --option version="$VERSION" - + echo 'Done!' diff --git a/src/bin/library_scripts.sh b/src/bin/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/bin/library_scripts.sh +++ b/src/bin/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ 
-141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/black/install.sh b/src/black/install.sh index a2196002f..f5c6bd998 100755 --- a/src/black/install.sh +++ b/src/black/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='black' --option version="$VERSION" - + echo 'Done!' diff --git a/src/black/library_scripts.sh b/src/black/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/black/library_scripts.sh +++ b/src/black/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/blackbox-exporter/install.sh b/src/blackbox-exporter/install.sh index 809263f9d..fac31cd8d 100755 --- a/src/blackbox-exporter/install.sh +++ b/src/blackbox-exporter/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
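
The `_apt_get_install` hunks in these library_scripts.sh files only touch trailing whitespace, but the trick they belong to is worth spelling out: snapshot the apt metadata before installing a throwaway downloader, then put it back so the container layer does not grow. Only the snapshot half is visible in the hunks; the restore and uninstall steps in this sketch are assumptions based on the function's stated purpose:

    url="$1"
    output_location="$2"
    tempdir="$(mktemp -d)"
    # snapshot apt metadata so it can be restored once the download is done
    cp -p -R /var/lib/apt/lists "$tempdir"
    apt-get update -y
    apt-get -y install --no-install-recommends wget ca-certificates
    wget -q "$url" -O "$output_location"
    # assumed cleanup: drop the throwaway downloader and restore the snapshot
    apt-get -y purge wget --auto-remove
    rm -rf /var/lib/apt/lists
    mv "$tempdir/lists" /var/lib/apt/lists
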
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='prometheus/blackbox_exporter' --option binaryNames='blackbox_exporter' --option version="$VERSION" - + echo 'Done!' diff --git a/src/blackbox-exporter/library_scripts.sh b/src/blackbox-exporter/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/blackbox-exporter/library_scripts.sh +++ b/src/blackbox-exporter/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures 
yet: $(uname -m)" diff --git a/src/bomber/install.sh b/src/bomber/install.sh index a0a6305ae..fc6541b49 100755 --- a/src/bomber/install.sh +++ b/src/bomber/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='devops-kung-fu/bomber' --option binaryNames='bomber' --option version="$VERSION" - + echo 'Done!' diff --git a/src/bomber/library_scripts.sh b/src/bomber/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/bomber/library_scripts.sh +++ b/src/bomber/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation 
found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/bower/install.sh b/src/bower/install.sh index 332ff1da4..e5bcab6c1 100755 --- a/src/bower/install.sh +++ b/src/bower/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='bower' --option version="$VERSION" - + echo 'Done!' diff --git a/src/bower/library_scripts.sh b/src/bower/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/bower/library_scripts.sh +++ b/src/bower/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/bpipe-sdkman/install.sh b/src/bpipe-sdkman/install.sh index ad3841d0e..a469d4491 100755 --- a/src/bpipe-sdkman/install.sh +++ b/src/bpipe-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/bpipe-sdkman/library_scripts.sh b/src/bpipe-sdkman/library_scripts.sh index 
8f9bd9a0e..5384c58d6 100644 --- a/src/bpipe-sdkman/library_scripts.sh +++ b/src/bpipe-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/brownie/library_scripts.sh b/src/brownie/library_scripts.sh index 0d7f34d4b..ed393e86f 100755 --- a/src/brownie/library_scripts.sh +++ b/src/brownie/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() 
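
The ensure_nanolayer hunks just above (bpipe-sdkman) also show the temporary-download branch: fetch a release tarball into a temp directory, unpack it, and register an EXIT trap so the copy disappears when the script ends. A condensed sketch of that branch, with the trap body and the tarball name assumed since neither appears in these hunks:

    required_version="v0.4.29"
    clib_type=gnu                                     # the hunks switch this to musl when /sbin/apk exists
    tar_filename="nanolayer-x86_64-unknown-linux-$clib_type.tgz"   # assumed naming, for illustration only
    tmp_dir="$(mktemp -d)"
    clean_up() { rm -rf "$tmp_dir"; }                 # assumed body; only the trap registration is visible
    trap clean_up EXIT
    # clean_download comes from the same library_scripts.sh and keeps downloader leftovers out of the layer
    clean_download "https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename" "$tmp_dir/$tar_filename"
    tar xfzv "$tmp_dir/$tar_filename" -C "$tmp_dir"
    chmod a+x "$tmp_dir/nanolayer"
    nanolayer_location="$tmp_dir/nanolayer"
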
{ # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/browserify/install.sh b/src/browserify/install.sh index b53fe90b9..97ea25e0a 100755 --- a/src/browserify/install.sh +++ b/src/browserify/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, 
-# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='browserify' --option version="$VERSION" - + echo 'Done!' diff --git a/src/browserify/library_scripts.sh b/src/browserify/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/browserify/library_scripts.sh +++ b/src/browserify/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed 
clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/btop-homebrew/install.sh b/src/btop-homebrew/install.sh index bf309754a..d58ad1e41 100755 --- a/src/btop-homebrew/install.sh +++ b/src/btop-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/btop-homebrew/library_scripts.sh b/src/btop-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/btop-homebrew/library_scripts.sh +++ b/src/btop-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/btrace-sdkman/install.sh b/src/btrace-sdkman/install.sh index 00d2ace9b..43a811ed9 100755 --- a/src/btrace-sdkman/install.sh +++ b/src/btrace-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git 
a/src/btrace-sdkman/library_scripts.sh b/src/btrace-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/btrace-sdkman/library_scripts.sh +++ b/src/btrace-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/budibase-cli/install.sh b/src/budibase-cli/install.sh index 12eacc07c..29e55d36c 100755 --- 
a/src/budibase-cli/install.sh +++ b/src/budibase-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='@budibase/cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/budibase-cli/library_scripts.sh b/src/budibase-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/budibase-cli/library_scripts.sh +++ b/src/budibase-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ 
"$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/buku/install.sh b/src/buku/install.sh index 8ae16b7d0..19ceec7d9 100755 --- a/src/buku/install.sh +++ b/src/buku/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='buku' --option version="$VERSION" - + echo 'Done!' diff --git a/src/buku/library_scripts.sh b/src/buku/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/buku/library_scripts.sh +++ b/src/buku/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/caddy/install.sh b/src/caddy/install.sh index 0f99f5f05..f7846902e 100755 --- a/src/caddy/install.sh +++ b/src/caddy/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers/features/go:1.1.3" \ --option version="$GOLANGVERSION" - + 
$nanolayer_location \ @@ -24,7 +24,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='caddyserver/caddy' --option binaryNames='caddy' --option version="$VERSION" - + echo 'Done!' diff --git a/src/caddy/library_scripts.sh b/src/caddy/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/caddy/library_scripts.sh +++ b/src/caddy/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux 
architectures yet: $(uname -m)" diff --git a/src/ccache-asdf/install.sh b/src/ccache-asdf/install.sh index b14b40c2f..7c0cd8772 100755 --- a/src/ccache-asdf/install.sh +++ b/src/ccache-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/apt-get-packages:1.0.4" \ --option packages='build-essential,cmake' - + $nanolayer_location \ @@ -25,7 +25,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='ccache' --option version="$VERSION" - + echo 'Done!' diff --git a/src/ccache-asdf/library_scripts.sh b/src/ccache-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/ccache-asdf/library_scripts.sh +++ b/src/ccache-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/checkov/install.sh b/src/checkov/install.sh index d8db23afb..0a851f820 100755 --- a/src/checkov/install.sh +++ b/src/checkov/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='checkov' --option version="$VERSION" - + diff --git a/src/checkov/library_scripts.sh b/src/checkov/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/checkov/library_scripts.sh +++ b/src/checkov/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/chezscheme-asdf/install.sh b/src/chezscheme-asdf/install.sh index 
e7bb35944..5df65abc8 100755 --- a/src/chezscheme-asdf/install.sh +++ b/src/chezscheme-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/apt-get-packages:1.0.4" \ --option packages='build-essential,uuid-dev,libncurses5-dev,libx11-dev' - + $nanolayer_location \ @@ -25,7 +25,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='chezscheme' --option version="$VERSION" - + echo 'Done!' diff --git a/src/chezscheme-asdf/library_scripts.sh b/src/chezscheme-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/chezscheme-asdf/library_scripts.sh +++ b/src/chezscheme-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/chisel/install.sh b/src/chisel/install.sh index 6dc460d2f..69bec3a91 100755 --- a/src/chisel/install.sh +++ b/src/chisel/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='jpillora/chisel' --option binaryNames='chisel' --option version="$VERSION" - + echo 'Done!' diff --git a/src/chisel/library_scripts.sh b/src/chisel/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/chisel/library_scripts.sh +++ b/src/chisel/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/circleci-cli/install.sh 
b/src/circleci-cli/install.sh index d817f9ba6..ca55c6bae 100755 --- a/src/circleci-cli/install.sh +++ b/src/circleci-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='CircleCI-Public/circleci-cli' --option binaryNames='circleci' --option version="$VERSION" - + echo 'Done!' diff --git a/src/circleci-cli/library_scripts.sh b/src/circleci-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/circleci-cli/library_scripts.sh +++ b/src/circleci-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation 
found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/clojure-asdf/install.sh b/src/clojure-asdf/install.sh index f4c3c845d..9d744d804 100755 --- a/src/clojure-asdf/install.sh +++ b/src/clojure-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='clojure' --option version="$VERSION" - + echo 'Done!' diff --git a/src/clojure-asdf/library_scripts.sh b/src/clojure-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/clojure-asdf/library_scripts.sh +++ b/src/clojure-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/cloud-nuke/install.sh b/src/cloud-nuke/install.sh index a8f15b275..344966c8a 100755 --- a/src/cloud-nuke/install.sh +++ b/src/cloud-nuke/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='gruntwork-io/cloud-nuke' --option binaryNames='cloud-nuke' --option version="$VERSION" - + echo 'Done!' diff --git a/src/cloud-nuke/library_scripts.sh b/src/cloud-nuke/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/cloud-nuke/library_scripts.sh +++ b/src/cloud-nuke/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git 
a/src/cloudflare-wrangler/install.sh b/src/cloudflare-wrangler/install.sh index 59b63a9a8..b4de677c9 100755 --- a/src/cloudflare-wrangler/install.sh +++ b/src/cloudflare-wrangler/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='wrangler' --option version="$VERSION" - + echo 'Done!' diff --git a/src/cloudflare-wrangler/library_scripts.sh b/src/cloudflare-wrangler/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/cloudflare-wrangler/library_scripts.sh +++ b/src/cloudflare-wrangler/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the 
script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/cloudflared-fips/install.sh b/src/cloudflared-fips/install.sh index 0bb8d4659..9d9b08076 100755 --- a/src/cloudflared-fips/install.sh +++ b/src/cloudflared-fips/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='cloudflare/cloudflared' --option binaryNames='cloudflared' --option version="$VERSION" --option assetRegex='.*(fips).*' - + echo 'Done!' diff --git a/src/cloudflared-fips/library_scripts.sh b/src/cloudflared-fips/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/cloudflared-fips/library_scripts.sh +++ b/src/cloudflared-fips/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/cloudflared/install.sh b/src/cloudflared/install.sh index f87fcee6c..d5dc71a3f 100755 --- a/src/cloudflared/install.sh +++ b/src/cloudflared/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ 
--option repo='cloudflare/cloudflared' --option binaryNames='cloudflared' --option version="$VERSION" --option assetRegex='^(?!.*(fips))' - + echo 'Done!' diff --git a/src/cloudflared/library_scripts.sh b/src/cloudflared/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/cloudflared/library_scripts.sh +++ b/src/cloudflared/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/cloudinary-cli/install.sh 
b/src/cloudinary-cli/install.sh index a874b7536..12daf831c 100755 --- a/src/cloudinary-cli/install.sh +++ b/src/cloudinary-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='cloudinary-cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/cloudinary-cli/library_scripts.sh b/src/cloudinary-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/cloudinary-cli/library_scripts.sh +++ b/src/cloudinary-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly 
and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/codefresh-cli/install.sh b/src/codefresh-cli/install.sh index 82ebc90cd..ad29c208a 100755 --- a/src/codefresh-cli/install.sh +++ b/src/codefresh-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='codefresh-io/cli' --option binaryNames='codefresh' --option version="$VERSION" - + echo 'Done!' diff --git a/src/codefresh-cli/library_scripts.sh b/src/codefresh-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/codefresh-cli/library_scripts.sh +++ b/src/codefresh-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/composer/install.sh b/src/composer/install.sh index 80872b17c..19164c9e0 100755 --- a/src/composer/install.sh +++ b/src/composer/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.24" \ --option 
repo='composer/composer' --option binaryNames='composer' --option version="$VERSION" - + echo 'Done!' diff --git a/src/composer/library_scripts.sh b/src/composer/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/composer/library_scripts.sh +++ b/src/composer/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/concurnas-sdkman/install.sh b/src/concurnas-sdkman/install.sh index 217e5a05f..067d9f06a 
100755 --- a/src/concurnas-sdkman/install.sh +++ b/src/concurnas-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/concurnas-sdkman/library_scripts.sh b/src/concurnas-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/concurnas-sdkman/library_scripts.sh +++ b/src/concurnas-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; 
then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/connor-sdkman/install.sh b/src/connor-sdkman/install.sh index 004bd1449..ea2850a42 100755 --- a/src/connor-sdkman/install.sh +++ b/src/connor-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/connor-sdkman/library_scripts.sh b/src/connor-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/connor-sdkman/library_scripts.sh +++ b/src/connor-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/consul-asdf/install.sh b/src/consul-asdf/install.sh index abc32b8fe..d46f55cc3 100755 --- a/src/consul-asdf/install.sh +++ b/src/consul-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ 
$nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='consul' --option version="$VERSION" - + echo 'Done!' diff --git a/src/consul-asdf/library_scripts.sh b/src/consul-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/consul-asdf/library_scripts.sh +++ b/src/consul-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for 
non-x86-linux architectures yet: $(uname -m)" diff --git a/src/consul-exporter/install.sh b/src/consul-exporter/install.sh index 378f09ab0..8118b7969 100755 --- a/src/consul-exporter/install.sh +++ b/src/consul-exporter/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='prometheus/consul_exporter' --option binaryNames='consul_exporter' --option version="$VERSION" - + echo 'Done!' diff --git a/src/consul-exporter/library_scripts.sh b/src/consul-exporter/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/consul-exporter/library_scripts.sh +++ b/src/consul-exporter/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/cookiecutter/install.sh b/src/cookiecutter/install.sh index d9b09ce78..6c3f407f9 100755 --- a/src/cookiecutter/install.sh +++ b/src/cookiecutter/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ 
--option package='cookiecutter' --option version="$VERSION" - + echo 'Done!' diff --git a/src/cookiecutter/library_scripts.sh b/src/cookiecutter/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/cookiecutter/library_scripts.sh +++ b/src/cookiecutter/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/copier/install.sh b/src/copier/install.sh index dcc7e929c..76ff71639 100755 --- 
a/src/copier/install.sh +++ b/src/copier/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='copier' --option version="$VERSION" - + echo 'Done!' diff --git a/src/copier/library_scripts.sh b/src/copier/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/copier/library_scripts.sh +++ b/src/copier/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ 
"$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/corepack/install.sh b/src/corepack/install.sh index 6f379d778..9d3ae3cf9 100755 --- a/src/corepack/install.sh +++ b/src/corepack/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='corepack' --option version="$VERSION" - + echo 'Done!' diff --git a/src/corepack/library_scripts.sh b/src/corepack/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/corepack/library_scripts.sh +++ b/src/corepack/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/cosign/install.sh b/src/cosign/install.sh index f77bec479..f980d267f 100755 --- a/src/cosign/install.sh +++ b/src/cosign/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option 
repo='sigstore/cosign' --option binaryNames='cosign' --option version="$VERSION" - + echo 'Done!' diff --git a/src/cosign/library_scripts.sh b/src/cosign/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/cosign/library_scripts.sh +++ b/src/cosign/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/coverage-py/install.sh b/src/coverage-py/install.sh index 878d1d54f..843d88560 100755 --- 
a/src/coverage-py/install.sh +++ b/src/coverage-py/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='coverage' --option version="$VERSION" - + echo 'Done!' diff --git a/src/coverage-py/library_scripts.sh b/src/coverage-py/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/coverage-py/library_scripts.sh +++ b/src/coverage-py/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname 
-sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/crystal-asdf/install.sh b/src/crystal-asdf/install.sh index ce307423c..5ec433532 100755 --- a/src/crystal-asdf/install.sh +++ b/src/crystal-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='crystal' --option version="$VERSION" - + echo 'Done!' diff --git a/src/crystal-asdf/library_scripts.sh b/src/crystal-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/crystal-asdf/library_scripts.sh +++ b/src/crystal-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/cuba-sdkman/install.sh b/src/cuba-sdkman/install.sh index 1f5aaa165..3d41dbfbd 100755 --- a/src/cuba-sdkman/install.sh +++ b/src/cuba-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git 
a/src/cuba-sdkman/library_scripts.sh b/src/cuba-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/cuba-sdkman/library_scripts.sh +++ b/src/cuba-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/curl-apt-get/install.sh b/src/curl-apt-get/install.sh index 6f1dafe39..96ef27c2c 100755 --- a/src/curl-apt-get/install.sh 
+++ b/src/curl-apt-get/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/curl-apt-get/library_scripts.sh b/src/curl-apt-get/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/curl-apt-get/library_scripts.sh +++ b/src/curl-apt-get/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up 
EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/curl-homebrew/install.sh b/src/curl-homebrew/install.sh index cfcd4ddc5..601b65796 100755 --- a/src/curl-homebrew/install.sh +++ b/src/curl-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/curl-homebrew/library_scripts.sh b/src/curl-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/curl-homebrew/library_scripts.sh +++ b/src/curl-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/cve-bin-tool/install.sh b/src/cve-bin-tool/install.sh index b980f7a24..d2fef92ac 100755 --- a/src/cve-bin-tool/install.sh +++ b/src/cve-bin-tool/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ 
devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='cve-bin-tool' --option version="$VERSION" - + echo 'Done' diff --git a/src/cve-bin-tool/library_scripts.sh b/src/cve-bin-tool/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/cve-bin-tool/library_scripts.sh +++ b/src/cve-bin-tool/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git 
a/src/cxf-sdkman/install.sh b/src/cxf-sdkman/install.sh index bf163edb3..45ce30f8c 100755 --- a/src/cxf-sdkman/install.sh +++ b/src/cxf-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/cxf-sdkman/library_scripts.sh b/src/cxf-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/cxf-sdkman/library_scripts.sh +++ b/src/cxf-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux 
x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/cyclonedx-cli/install.sh b/src/cyclonedx-cli/install.sh index b6723fad8..508f17ea7 100755 --- a/src/cyclonedx-cli/install.sh +++ b/src/cyclonedx-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='CycloneDX/cyclonedx-cli' --option binaryNames='cyclonedx' --option version="$VERSION" - + echo 'Done!' diff --git a/src/cyclonedx-cli/library_scripts.sh b/src/cyclonedx-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/cyclonedx-cli/library_scripts.sh +++ b/src/cyclonedx-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/cyclonedx-python/install.sh b/src/cyclonedx-python/install.sh index e24366cee..9df58bb46 100755 --- a/src/cyclonedx-python/install.sh +++ b/src/cyclonedx-python/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='cyclonedx-bom' --option version="$VERSION" - + echo 'Done' diff --git a/src/cyclonedx-python/library_scripts.sh b/src/cyclonedx-python/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/cyclonedx-python/library_scripts.sh +++ b/src/cyclonedx-python/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/cz-cli/install.sh 
b/src/cz-cli/install.sh index 6e8e1ddfc..ddfd3b6a8 100755 --- a/src/cz-cli/install.sh +++ b/src/cz-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='commitizen' --option version="$VERSION" - + echo 'Done!' diff --git a/src/cz-cli/library_scripts.sh b/src/cz-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/cz-cli/library_scripts.sh +++ b/src/cz-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z 
"${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/dasel-asdf/install.sh b/src/dasel-asdf/install.sh index d818475fe..30ccb3d28 100755 --- a/src/dasel-asdf/install.sh +++ b/src/dasel-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='dasel' --option version="$VERSION" - + echo 'Done!' diff --git a/src/dasel-asdf/library_scripts.sh b/src/dasel-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/dasel-asdf/library_scripts.sh +++ b/src/dasel-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/dashlane-cli/install.sh b/src/dashlane-cli/install.sh index d8f99d25c..18d4f059a 100755 --- a/src/dashlane-cli/install.sh +++ b/src/dashlane-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ 
devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='Dashlane/dashlane-cli' --option binaryNames='dcli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/dashlane-cli/library_scripts.sh b/src/dashlane-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/dashlane-cli/library_scripts.sh +++ b/src/dashlane-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" 
diff --git a/src/datadog-ci-cli/install.sh b/src/datadog-ci-cli/install.sh index feeeef9d0..57857cf9c 100755 --- a/src/datadog-ci-cli/install.sh +++ b/src/datadog-ci-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='DataDog/datadog-ci' --option binaryNames='datadog-ci' --option version="$VERSION" - + echo 'Done!' diff --git a/src/datadog-ci-cli/library_scripts.sh b/src/datadog-ci-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/datadog-ci-cli/library_scripts.sh +++ b/src/datadog-ci-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end 
of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/datasette/install.sh b/src/datasette/install.sh index 9dfef8d5f..4025b385c 100755 --- a/src/datasette/install.sh +++ b/src/datasette/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='datasette' --option version="$VERSION" - + echo 'Done!' diff --git a/src/datasette/library_scripts.sh b/src/datasette/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/datasette/library_scripts.sh +++ b/src/datasette/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/dbt-coverage/install.sh b/src/dbt-coverage/install.sh index c53add058..32e78199c 100755 --- a/src/dbt-coverage/install.sh +++ b/src/dbt-coverage/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ 
--option package='dbt-coverage' --option version="$VERSION" - + echo 'Done!' diff --git a/src/dbt-coverage/library_scripts.sh b/src/dbt-coverage/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/dbt-coverage/library_scripts.sh +++ b/src/dbt-coverage/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ddgr-apt-get/install.sh b/src/ddgr-apt-get/install.sh index dab7a137d..822d37fc4 100755 --- 
a/src/ddgr-apt-get/install.sh +++ b/src/ddgr-apt-get/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/ddgr-apt-get/library_scripts.sh b/src/ddgr-apt-get/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/ddgr-apt-get/library_scripts.sh +++ b/src/ddgr-apt-get/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ 
ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ddgr-homebrew/install.sh b/src/ddgr-homebrew/install.sh index 089557db3..7ea89d2cd 100755 --- a/src/ddgr-homebrew/install.sh +++ b/src/ddgr-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/ddgr-homebrew/library_scripts.sh b/src/ddgr-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/ddgr-homebrew/library_scripts.sh +++ b/src/ddgr-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/deno-asdf/install.sh b/src/deno-asdf/install.sh index 7d2570a2e..2423319ff 100755 --- a/src/deno-asdf/install.sh +++ b/src/deno-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ 
devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='deno' --option version="$VERSION" - + echo 'Done!' diff --git a/src/deno-asdf/library_scripts.sh b/src/deno-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/deno-asdf/library_scripts.sh +++ b/src/deno-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures 
yet: $(uname -m)" diff --git a/src/devcontainers-cli/install.sh b/src/devcontainers-cli/install.sh index 749eaae9f..b838c163f 100755 --- a/src/devcontainers-cli/install.sh +++ b/src/devcontainers-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='@devcontainers/cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/devcontainers-cli/library_scripts.sh b/src/devcontainers-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/devcontainers-cli/library_scripts.sh +++ b/src/devcontainers-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete 
at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/direnv-asdf/install.sh b/src/direnv-asdf/install.sh index 68c113229..146d4e6d8 100755 --- a/src/direnv-asdf/install.sh +++ b/src/direnv-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='direnv' --option version="$VERSION" - + echo 'Done!' diff --git a/src/direnv-asdf/library_scripts.sh b/src/direnv-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/direnv-asdf/library_scripts.sh +++ b/src/direnv-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/direnv/install.sh b/src/direnv/install.sh index f6f591889..53b914968 100755 --- a/src/direnv/install.sh +++ b/src/direnv/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='direnv/direnv' --option binaryNames='direnv' --option version="$VERSION" - + echo 'Done!' diff --git a/src/direnv/library_scripts.sh b/src/direnv/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/direnv/library_scripts.sh +++ b/src/direnv/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/dive/install.sh 
b/src/dive/install.sh index 8533301af..aad398bfc 100755 --- a/src/dive/install.sh +++ b/src/dive/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='wagoodman/dive' --option binaryNames='dive' --option version="$VERSION" - + echo 'Done!' diff --git a/src/dive/library_scripts.sh b/src/dive/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/dive/library_scripts.sh +++ b/src/dive/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z 
"${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/dnote/install.sh b/src/dnote/install.sh index b36fa3e36..0f9dd1725 100755 --- a/src/dnote/install.sh +++ b/src/dnote/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='dnote/dnote' --option binaryNames='dnote' --option version="$VERSION" - + echo 'Done!' diff --git a/src/dnote/library_scripts.sh b/src/dnote/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/dnote/library_scripts.sh +++ b/src/dnote/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/doctoolchain-sdkman/install.sh b/src/doctoolchain-sdkman/install.sh index 55c1396e4..0711afd24 100755 --- a/src/doctoolchain-sdkman/install.sh +++ b/src/doctoolchain-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/doctoolchain-sdkman/library_scripts.sh 
b/src/doctoolchain-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/doctoolchain-sdkman/library_scripts.sh +++ b/src/doctoolchain-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/dprint-asdf/install.sh b/src/dprint-asdf/install.sh index fe2f31a17..3c287285a 100755 --- a/src/dprint-asdf/install.sh +++ 
b/src/dprint-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='dprint' --option version="$VERSION" - + echo 'Done!' diff --git a/src/dprint-asdf/library_scripts.sh b/src/dprint-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/dprint-asdf/library_scripts.sh +++ b/src/dprint-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if 
[[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/driftctl/install.sh b/src/driftctl/install.sh index d0ee66f13..b17dd204d 100755 --- a/src/driftctl/install.sh +++ b/src/driftctl/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='snyk/driftctl' --option binaryNames='driftctl' --option version="$VERSION" - + echo 'Done!' diff --git a/src/driftctl/library_scripts.sh b/src/driftctl/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/driftctl/library_scripts.sh +++ b/src/driftctl/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/drone-cli/install.sh b/src/drone-cli/install.sh index 6d3792378..853e70c91 100755 --- a/src/drone-cli/install.sh +++ b/src/drone-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option 
repo='harness/drone-cli' --option binaryNames='drone' --option version="$VERSION" - + echo 'Done!' diff --git a/src/drone-cli/library_scripts.sh b/src/drone-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/drone-cli/library_scripts.sh +++ b/src/drone-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/dua/install.sh b/src/dua/install.sh index 446b86c3b..d88683089 100755 --- 
a/src/dua/install.sh +++ b/src/dua/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='Byron/dua-cli' --option binaryNames='dua' --option version="$VERSION" - + echo 'Done!' diff --git a/src/dua/library_scripts.sh b/src/dua/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/dua/library_scripts.sh +++ b/src/dua/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' 
] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/duf/install.sh b/src/duf/install.sh index 7195dd9ad..e5a267c48 100755 --- a/src/duf/install.sh +++ b/src/duf/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='muesli/duf' --option binaryNames='duf' --option version="$VERSION" - + echo 'Done!' diff --git a/src/duf/library_scripts.sh b/src/duf/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/duf/library_scripts.sh +++ b/src/duf/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/dufs/install.sh b/src/dufs/install.sh index eb3464e7a..3573dc1aa 100755 --- a/src/dufs/install.sh +++ b/src/dufs/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='sigoden/dufs' 
--option binaryNames='dufs' --option version="$VERSION" - + echo 'Done!' diff --git a/src/dufs/library_scripts.sh b/src/dufs/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/dufs/library_scripts.sh +++ b/src/dufs/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/eas-cli/install.sh b/src/eas-cli/install.sh index f30ff8a22..2eb821ac1 100755 --- a/src/eas-cli/install.sh +++ b/src/eas-cli/install.sh 
@@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='eas-cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/eas-cli/library_scripts.sh b/src/eas-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/eas-cli/library_scripts.sh +++ b/src/eas-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 
+141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/eget/install.sh b/src/eget/install.sh index b10647ca8..5c9b04445 100755 --- a/src/eget/install.sh +++ b/src/eget/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='zyedidia/eget' --option binaryNames='eget' --option version="$VERSION" - + echo 'Done!' diff --git a/src/eget/library_scripts.sh b/src/eget/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/eget/library_scripts.sh +++ b/src/eget/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/elasticsearch-asdf/install.sh b/src/elasticsearch-asdf/install.sh index b7e423462..ea4012714 100755 --- a/src/elasticsearch-asdf/install.sh +++ b/src/elasticsearch-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='elasticsearch' --option version="$VERSION" - + echo 'Done!' diff --git a/src/elasticsearch-asdf/library_scripts.sh b/src/elasticsearch-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/elasticsearch-asdf/library_scripts.sh +++ b/src/elasticsearch-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for 
non-x86-linux architectures yet: $(uname -m)" diff --git a/src/elm-asdf/install.sh b/src/elm-asdf/install.sh index b9017f248..a24e6b2af 100755 --- a/src/elm-asdf/install.sh +++ b/src/elm-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='elm' --option version="$VERSION" - + echo 'Done!' diff --git a/src/elm-asdf/library_scripts.sh b/src/elm-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/elm-asdf/library_scripts.sh +++ b/src/elm-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, 
download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ember-cli/install.sh b/src/ember-cli/install.sh index f4ea83cfa..84d55a269 100755 --- a/src/ember-cli/install.sh +++ b/src/ember-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='ember-cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/ember-cli/library_scripts.sh b/src/ember-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/ember-cli/library_scripts.sh +++ b/src/ember-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/envoy/install.sh b/src/envoy/install.sh index 26cecb1aa..f83c8515a 100755 --- a/src/envoy/install.sh +++ b/src/envoy/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option 
repo='envoyproxy/envoy' --option binaryNames='envoy' --option version="$VERSION" --option assetRegex='^(?!.*(contrib))' - + echo 'Done!' diff --git a/src/envoy/library_scripts.sh b/src/envoy/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/envoy/library_scripts.sh +++ b/src/envoy/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/epinio/install.sh b/src/epinio/install.sh index a228e4811..fe59c7d99 
100755 --- a/src/epinio/install.sh +++ b/src/epinio/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='epinio/epinio' --option binaryNames='epinio' --option version="$VERSION" - + echo 'Done!' diff --git a/src/epinio/library_scripts.sh b/src/epinio/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/epinio/library_scripts.sh +++ b/src/epinio/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ 
"$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/etcd/install.sh b/src/etcd/install.sh index 667a057d1..7a76d926a 100755 --- a/src/etcd/install.sh +++ b/src/etcd/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='etcd-io/etcd' --option binaryNames='etcd,etcdctl' --option version="$VERSION" --option libName='etcd' - + echo 'Done!' diff --git a/src/etcd/library_scripts.sh b/src/etcd/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/etcd/library_scripts.sh +++ b/src/etcd/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/exa/install.sh b/src/exa/install.sh index 47f8ccdbc..63c3328f7 100755 --- a/src/exa/install.sh +++ b/src/exa/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='ogham/exa' --option 
binaryNames='exa' --option version="$VERSION" - + echo 'Done!' diff --git a/src/exa/library_scripts.sh b/src/exa/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/exa/library_scripts.sh +++ b/src/exa/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/exercism-cli/install.sh b/src/exercism-cli/install.sh index d7e87a0f5..fdf019e25 100755 --- a/src/exercism-cli/install.sh +++ 
b/src/exercism-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='exercism/cli' --option binaryNames='exercism' --option version="$VERSION" - + echo 'Done!' diff --git a/src/exercism-cli/library_scripts.sh b/src/exercism-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/exercism-cli/library_scripts.sh +++ b/src/exercism-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ 
"$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/expo-cli/install.sh b/src/expo-cli/install.sh index 77391add3..1ef7da7fd 100755 --- a/src/expo-cli/install.sh +++ b/src/expo-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='expo-cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/expo-cli/library_scripts.sh b/src/expo-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/expo-cli/library_scripts.sh +++ b/src/expo-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/express-generator/install.sh b/src/express-generator/install.sh index 1bb46ba74..99ccf8b35 100755 --- a/src/express-generator/install.sh +++ b/src/express-generator/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='express-generator' --option version="$VERSION" - + echo 'Done!' diff --git a/src/express-generator/library_scripts.sh b/src/express-generator/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/express-generator/library_scripts.sh +++ b/src/express-generator/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/fd/install.sh 
b/src/fd/install.sh index d4199d37c..7edb25949 100755 --- a/src/fd/install.sh +++ b/src/fd/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='sharkdp/fd' --option binaryNames='fd' --option version="$VERSION" - + echo 'Done!' diff --git a/src/fd/library_scripts.sh b/src/fd/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/fd/library_scripts.sh +++ b/src/fd/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z 
"${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ffmpeg-apt-get/install.sh b/src/ffmpeg-apt-get/install.sh index d1053d5f2..dba885681 100755 --- a/src/ffmpeg-apt-get/install.sh +++ b/src/ffmpeg-apt-get/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/ffmpeg-apt-get/library_scripts.sh b/src/ffmpeg-apt-get/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/ffmpeg-apt-get/library_scripts.sh +++ b/src/ffmpeg-apt-get/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/firebase-cli/install.sh b/src/firebase-cli/install.sh index 5d63b6d57..c41ecfa30 100755 --- a/src/firebase-cli/install.sh +++ b/src/firebase-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ 
devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='firebase-tools' --option version="$VERSION" - + echo 'Done!' diff --git a/src/firebase-cli/library_scripts.sh b/src/firebase-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/firebase-cli/library_scripts.sh +++ b/src/firebase-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git 
a/src/fish-apt-get/install.sh b/src/fish-apt-get/install.sh index 6586e4368..c72bf1421 100755 --- a/src/fish-apt-get/install.sh +++ b/src/fish-apt-get/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/fish-apt-get/library_scripts.sh b/src/fish-apt-get/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/fish-apt-get/library_scripts.sh +++ b/src/fish-apt-get/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" 
== "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/fkill/install.sh b/src/fkill/install.sh index 4d5d0dc08..41fabe046 100755 --- a/src/fkill/install.sh +++ b/src/fkill/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='fkill-cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/fkill/library_scripts.sh b/src/fkill/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/fkill/library_scripts.sh +++ b/src/fkill/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/flake8/install.sh b/src/flake8/install.sh index fbbfc4b4f..a5e0da521 100755 --- a/src/flake8/install.sh +++ b/src/flake8/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option 
package='flake8' --option injections="$PLUGINS" --option version="$VERSION" - + echo 'Done!' diff --git a/src/flake8/library_scripts.sh b/src/flake8/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/flake8/library_scripts.sh +++ b/src/flake8/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/flink-sdkman/install.sh b/src/flink-sdkman/install.sh index d218d8447..5cf94041a 100755 --- 
a/src/flink-sdkman/install.sh +++ b/src/flink-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/flink-sdkman/library_scripts.sh b/src/flink-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/flink-sdkman/library_scripts.sh +++ b/src/flink-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ 
ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/flit/install.sh b/src/flit/install.sh index 93bf633a7..d1f4a3c4b 100755 --- a/src/flit/install.sh +++ b/src/flit/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='flit' --option version="$VERSION" - + echo 'Done!' diff --git a/src/flit/library_scripts.sh b/src/flit/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/flit/library_scripts.sh +++ b/src/flit/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/former2-cli/install.sh b/src/former2-cli/install.sh index 6a63da326..e24965e72 100755 --- a/src/former2-cli/install.sh +++ b/src/former2-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ 
--option package='former2' --option version="$VERSION" - + $nanolayer_location \ @@ -24,7 +24,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='@shopify/theme' --option version="$VERSION" - + echo 'Done!' diff --git a/src/former2-cli/library_scripts.sh b/src/former2-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/former2-cli/library_scripts.sh +++ b/src/former2-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - 
+ else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/fossil-apt-get/install.sh b/src/fossil-apt-get/install.sh index 3819e5c4b..1cbc34e60 100755 --- a/src/fossil-apt-get/install.sh +++ b/src/fossil-apt-get/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/fossil-apt-get/library_scripts.sh b/src/fossil-apt-get/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/fossil-apt-get/library_scripts.sh +++ b/src/fossil-apt-get/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download 
it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/fossil-homebrew/install.sh b/src/fossil-homebrew/install.sh index 0f6c81efe..4ade76824 100755 --- a/src/fossil-homebrew/install.sh +++ b/src/fossil-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/fossil-homebrew/library_scripts.sh b/src/fossil-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/fossil-homebrew/library_scripts.sh +++ b/src/fossil-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/fulcio/install.sh b/src/fulcio/install.sh index d2b35bf35..4344bf869 100755 --- a/src/fulcio/install.sh +++ b/src/fulcio/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='sigstore/fulcio' --option binaryNames='fulcio' --option version="$VERSION" - + echo 'Done!' diff --git a/src/fulcio/library_scripts.sh b/src/fulcio/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/fulcio/library_scripts.sh +++ b/src/fulcio/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/fzf/install.sh 
b/src/fzf/install.sh index c2948aa7b..be19b2459 100755 --- a/src/fzf/install.sh +++ b/src/fzf/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='junegunn/fzf' --option binaryNames='fzf' --option version="$VERSION" - + echo 'Done!' diff --git a/src/fzf/library_scripts.sh b/src/fzf/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/fzf/library_scripts.sh +++ b/src/fzf/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z 
"${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/gaiden-sdkman/install.sh b/src/gaiden-sdkman/install.sh index 55383ce2c..a8202c4e0 100755 --- a/src/gaiden-sdkman/install.sh +++ b/src/gaiden-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/gaiden-sdkman/library_scripts.sh b/src/gaiden-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/gaiden-sdkman/library_scripts.sh +++ b/src/gaiden-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ganache/install.sh b/src/ganache/install.sh index b40362e18..16e3c9ba0 100755 --- a/src/ganache/install.sh +++ b/src/ganache/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='ganache' --option version="$VERSION" - + echo 'Done!' diff --git a/src/ganache/library_scripts.sh b/src/ganache/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/ganache/library_scripts.sh +++ b/src/ganache/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/gdbgui/install.sh b/src/gdbgui/install.sh index 
4ad9a156a..22ae1da06 100755 --- a/src/gdbgui/install.sh +++ b/src/gdbgui/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='gdbgui' --option version="$VERSION" - + echo 'Done!' diff --git a/src/gdbgui/library_scripts.sh b/src/gdbgui/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/gdbgui/library_scripts.sh +++ b/src/gdbgui/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname 
-sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/gh-cli/install.sh b/src/gh-cli/install.sh index b5ba1243e..9b01bf469 100755 --- a/src/gh-cli/install.sh +++ b/src/gh-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='cli/cli' --option binaryNames='gh' --option version="$VERSION" - + echo 'Done!' diff --git a/src/gh-cli/library_scripts.sh b/src/gh-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/gh-cli/library_scripts.sh +++ b/src/gh-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/git-lfs/install.sh b/src/git-lfs/install.sh index 6d8db464a..0136d114a 100755 --- a/src/git-lfs/install.sh +++ b/src/git-lfs/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option 
repo='git-lfs/git-lfs' --option binaryNames='git-lfs' --option version="$VERSION" - + echo 'Done!' diff --git a/src/git-lfs/library_scripts.sh b/src/git-lfs/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/git-lfs/library_scripts.sh +++ b/src/git-lfs/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/gitmux/install.sh b/src/gitmux/install.sh index d46f080ac..ea04a9cad 100755 --- 
a/src/gitmux/install.sh +++ b/src/gitmux/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='arl/gitmux' --option binaryNames='gitmux' --option version="$VERSION" - + echo 'Done!' diff --git a/src/gitmux/library_scripts.sh b/src/gitmux/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/gitmux/library_scripts.sh +++ b/src/gitmux/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname 
-sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/gitomatic/install.sh b/src/gitomatic/install.sh index 730242bb3..449e3766c 100755 --- a/src/gitomatic/install.sh +++ b/src/gitomatic/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='muesli/gitomatic' --option binaryNames='gitomatic' --option version="$VERSION" - + echo 'Done!' diff --git a/src/gitomatic/library_scripts.sh b/src/gitomatic/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/gitomatic/library_scripts.sh +++ b/src/gitomatic/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/gitsign-credential-cache/install.sh b/src/gitsign-credential-cache/install.sh index dab174fa7..541078936 100755 --- a/src/gitsign-credential-cache/install.sh +++ b/src/gitsign-credential-cache/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
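The clean_download comments in these library_scripts.sh hunks describe a "temporary downloader" trick: only install wget when neither curl nor wget is already present, keep a copy of the package manager's metadata first, and restore everything afterwards so the container layer stays small. A condensed sketch of that idea for the apt case only (the function name and the purge command are illustrative assumptions; the patched script also handles apk on alpine):

    # Download $1 to $2 while leaving the container layer as untouched as possible.
    fetch_with_minimal_footprint() {
        url=$1
        output_location=$2
        tempdir=$(mktemp -d)

        if type curl >/dev/null 2>&1; then
            curl -sfL "$url" -o "$output_location"
        elif type wget >/dev/null 2>&1; then
            wget -q "$url" -O "$output_location"
        elif [ -x "/usr/bin/apt-get" ]; then
            # Back up the apt lists so the temporary `apt-get update` can be undone.
            cp -p -R /var/lib/apt/lists "$tempdir"
            apt-get update -y
            apt-get -y install --no-install-recommends wget ca-certificates
            wget -q "$url" -O "$output_location"
            # Undo the temporary install and put the saved lists back.
            apt-get -y purge wget --auto-remove
            rm -rf /var/lib/apt/lists
            mv "$tempdir/lists" /var/lib/apt/lists
        else
            echo "distro not supported" >&2
            rm -rf "$tempdir"
            return 1
        fi

        rm -rf "$tempdir"
    }
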
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='sigstore/gitsign' --option binaryNames='gitsign-credential-cache' --option version="$VERSION" - + echo 'Done!' diff --git a/src/gitsign-credential-cache/library_scripts.sh b/src/gitsign-credential-cache/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/gitsign-credential-cache/library_scripts.sh +++ b/src/gitsign-credential-cache/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for 
non-x86-linux architectures yet: $(uname -m)" diff --git a/src/gitsign/install.sh b/src/gitsign/install.sh index fa7ccde1e..cbf0f9733 100755 --- a/src/gitsign/install.sh +++ b/src/gitsign/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='sigstore/gitsign' --option binaryNames='gitsign' --option version="$VERSION" --option assetRegex='^(?!.*(credential-cache))' - + echo 'Done!' diff --git a/src/gitsign/library_scripts.sh b/src/gitsign/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/gitsign/library_scripts.sh +++ b/src/gitsign/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/gitty/install.sh b/src/gitty/install.sh index fc389a9c6..9889d2923 100755 --- a/src/gitty/install.sh +++ b/src/gitty/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option 
repo='muesli/gitty' --option binaryNames='gitty' --option version="$VERSION" - + echo 'Done!' diff --git a/src/gitty/library_scripts.sh b/src/gitty/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/gitty/library_scripts.sh +++ b/src/gitty/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/glances/install.sh b/src/glances/install.sh index 52688e3f2..8de97e05d 100755 --- a/src/glances/install.sh +++ 
b/src/glances/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='glances[action,browser,cloud,cpuinfo,docker,export,folders,gpu,graph,ip,raid,snmp,web,wifi]' --option version="$VERSION" - + echo 'Done!' diff --git a/src/glances/library_scripts.sh b/src/glances/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/glances/library_scripts.sh +++ b/src/glances/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z 
"${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/gleam/install.sh b/src/gleam/install.sh index da550c04e..f807e5513 100755 --- a/src/gleam/install.sh +++ b/src/gleam/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.24" \ --option repo='gleam-lang/gleam' --option binaryNames='gleam' --option version="$VERSION" - + echo 'Done!' diff --git a/src/gleam/library_scripts.sh b/src/gleam/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/gleam/library_scripts.sh +++ b/src/gleam/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/go-task/install.sh b/src/go-task/install.sh index af9737ead..92253394d 100755 --- a/src/go-task/install.sh +++ b/src/go-task/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option 
repo='go-task/task' --option binaryNames='task' --option version="$VERSION" - + echo 'Done!' diff --git a/src/go-task/library_scripts.sh b/src/go-task/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/go-task/library_scripts.sh +++ b/src/go-task/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/graalvm-asdf/install.sh b/src/graalvm-asdf/install.sh index 9d4123951..c2cfb6912 100755 --- 
a/src/graalvm-asdf/install.sh +++ b/src/graalvm-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='graalvm' --option version="$VERSION" - + echo 'Done!' diff --git a/src/graalvm-asdf/library_scripts.sh b/src/graalvm-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/graalvm-asdf/library_scripts.sh +++ b/src/graalvm-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly 
and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/gradle-sdkman/install.sh b/src/gradle-sdkman/install.sh index ce5f856b6..c844cfe28 100755 --- a/src/gradle-sdkman/install.sh +++ b/src/gradle-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/gradle-sdkman/library_scripts.sh b/src/gradle-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/gradle-sdkman/library_scripts.sh +++ b/src/gradle-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
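The ensure_nanolayer hunks above lean on two small patterns worth spelling out: an EXIT trap so the temporary copy is always cleaned up, and an apk check to choose between musl and glibc builds. A sketch of just that part (the helper name, the x86_64-only tarball name, and the gnu fallback are assumptions not visible in these hunks; the real helper also stores the resulting path in a caller-named variable rather than printing it):

    download_temporary_nanolayer() {
        required_version=$1

        tmp_dir=$(mktemp -d)
        clean_up() {
            rm -rf "$tmp_dir"
        }
        # Whatever happens later, the temporary copy is deleted when the script exits.
        trap clean_up EXIT

        # Alpine images ship /sbin/apk and use musl; otherwise assume glibc.
        if [ -x "/sbin/apk" ]; then
            clib_type=musl
        else
            clib_type=gnu
        fi

        tar_filename="nanolayer-x86_64-unknown-linux-$clib_type.tgz"
        clean_download \
            "https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename" \
            "$tmp_dir/$tar_filename"

        tar xfzv "$tmp_dir/$tar_filename" -C "$tmp_dir"
        chmod a+x "$tmp_dir/nanolayer"
        echo "$tmp_dir/nanolayer"
    }
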
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/gradleprofiler-sdkman/install.sh b/src/gradleprofiler-sdkman/install.sh index 5e7c9e641..1fb9b1c6f 100755 --- a/src/gradleprofiler-sdkman/install.sh +++ b/src/gradleprofiler-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location 
"v0.4.29" diff --git a/src/gradleprofiler-sdkman/library_scripts.sh b/src/gradleprofiler-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/gradleprofiler-sdkman/library_scripts.sh +++ b/src/gradleprofiler-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/grails-sdkman/install.sh b/src/grails-sdkman/install.sh index 
889ee821f..7ebac738d 100755 --- a/src/grails-sdkman/install.sh +++ b/src/grails-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/grails-sdkman/library_scripts.sh b/src/grails-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/grails-sdkman/library_scripts.sh +++ b/src/grails-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" 
]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/graphite-exporter/install.sh b/src/graphite-exporter/install.sh index 0ff8511b2..3b3903acd 100755 --- a/src/graphite-exporter/install.sh +++ b/src/graphite-exporter/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='prometheus/graphite_exporter' --option binaryNames='graphite_exporter' --option version="$VERSION" - + echo 'Done!' diff --git a/src/graphite-exporter/library_scripts.sh b/src/graphite-exporter/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/graphite-exporter/library_scripts.sh +++ b/src/graphite-exporter/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
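Before any download happens, ensure_nanolayer first looks for "any existing nanolayer installations", as its comment block puts it. A tiny sketch of that lookup order (the override variable name is an illustrative assumption, not taken from the patch):

    resolve_existing_nanolayer() {
        # Prefer an explicit override, then anything already on PATH; print nothing
        # if no usable binary is found, so the caller knows to download one instead.
        if [ -n "${NANOLAYER_CLI_LOCATION:-}" ] && [ -x "${NANOLAYER_CLI_LOCATION}" ]; then
            echo "${NANOLAYER_CLI_LOCATION}"
        elif command -v nanolayer >/dev/null 2>&1; then
            command -v nanolayer
        fi
    }
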
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/groovy-sdkman/install.sh b/src/groovy-sdkman/install.sh index 3f9150076..174566931 100755 --- a/src/groovy-sdkman/install.sh +++ b/src/groovy-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/groovy-sdkman/library_scripts.sh b/src/groovy-sdkman/library_scripts.sh index 
8f9bd9a0e..5384c58d6 100644 --- a/src/groovy-sdkman/library_scripts.sh +++ b/src/groovy-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/groovyserv-sdkman/install.sh b/src/groovyserv-sdkman/install.sh index 2e64734fc..17571c43b 100755 --- a/src/groovyserv-sdkman/install.sh +++ b/src/groovyserv-sdkman/install.sh @@ -6,8 +6,8 @@ source 
./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/groovyserv-sdkman/library_scripts.sh b/src/groovyserv-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/groovyserv-sdkman/library_scripts.sh +++ b/src/groovyserv-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then 
clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/grpcurl-asdf/install.sh b/src/grpcurl-asdf/install.sh index bbf2b4247..648e7f6de 100755 --- a/src/grpcurl-asdf/install.sh +++ b/src/grpcurl-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='grpcurl' --option version="$VERSION" - + echo 'Done!' diff --git a/src/grpcurl-asdf/library_scripts.sh b/src/grpcurl-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/grpcurl-asdf/library_scripts.sh +++ b/src/grpcurl-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/grype/install.sh b/src/grype/install.sh index 9cf07dce8..8929c69cc 100755 --- a/src/grype/install.sh +++ b/src/grype/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='anchore/grype' --option binaryNames='grype' --option version="$VERSION" - + echo 'Done!' diff --git a/src/grype/library_scripts.sh b/src/grype/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/grype/library_scripts.sh +++ b/src/grype/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/gulp-cli/install.sh 
b/src/gulp-cli/install.sh index 7dddc1faf..61d88762f 100755 --- a/src/gulp-cli/install.sh +++ b/src/gulp-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='gulp-cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/gulp-cli/library_scripts.sh b/src/gulp-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/gulp-cli/library_scripts.sh +++ b/src/gulp-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z 
"${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/hadoop-sdkman/install.sh b/src/hadoop-sdkman/install.sh index 7d7197bbc..e0fb21928 100755 --- a/src/hadoop-sdkman/install.sh +++ b/src/hadoop-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/hadoop-sdkman/library_scripts.sh b/src/hadoop-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/hadoop-sdkman/library_scripts.sh +++ b/src/hadoop-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/hatch/install.sh b/src/hatch/install.sh index df6e83aa1..6f18d0650 100755 --- a/src/hatch/install.sh +++ b/src/hatch/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='hatch' --option version="$VERSION" - + echo 'Done!' diff --git a/src/hatch/library_scripts.sh b/src/hatch/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/hatch/library_scripts.sh +++ b/src/hatch/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/haxe-asdf/install.sh b/src/haxe-asdf/install.sh index 37100dc1d..a423365cb 
100755 --- a/src/haxe-asdf/install.sh +++ b/src/haxe-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='haxe' --option version="$VERSION" - + echo 'Done!' diff --git a/src/haxe-asdf/library_scripts.sh b/src/haxe-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/haxe-asdf/library_scripts.sh +++ b/src/haxe-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and 
delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/hotel/install.sh b/src/hotel/install.sh index a26a66a34..31007a8a0 100755 --- a/src/hotel/install.sh +++ b/src/hotel/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='hotel' --option version="$VERSION" - + echo 'Done!' diff --git a/src/hotel/library_scripts.sh b/src/hotel/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/hotel/library_scripts.sh +++ b/src/hotel/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/how2/install.sh b/src/how2/install.sh index 62ca4c769..43c46bedd 100755 --- a/src/how2/install.sh +++ b/src/how2/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='santinic/how2' 
--option binaryNames='how2' --option version="$VERSION" - + echo 'Done!' diff --git a/src/how2/library_scripts.sh b/src/how2/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/how2/library_scripts.sh +++ b/src/how2/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/http-server/install.sh b/src/http-server/install.sh index 56a9a4e00..99620f0c5 100755 --- a/src/http-server/install.sh +++ 
b/src/http-server/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='http-server' --option version="$VERSION" - + echo 'Done!' diff --git a/src/http-server/library_scripts.sh b/src/http-server/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/http-server/library_scripts.sh +++ b/src/http-server/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ 
"$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/http4k-sdkman/install.sh b/src/http4k-sdkman/install.sh index a6ffcc18f..a17d22a21 100755 --- a/src/http4k-sdkman/install.sh +++ b/src/http4k-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/http4k-sdkman/library_scripts.sh b/src/http4k-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/http4k-sdkman/library_scripts.sh +++ b/src/http4k-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/hyperfine/install.sh b/src/hyperfine/install.sh index f68b62036..6f4b43dee 100755 --- a/src/hyperfine/install.sh +++ b/src/hyperfine/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='sharkdp/hyperfine' --option binaryNames='hyperfine' --option version="$VERSION" - + echo 'Done!' diff --git a/src/hyperfine/library_scripts.sh b/src/hyperfine/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/hyperfine/library_scripts.sh +++ b/src/hyperfine/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git 
a/src/immuadmin-fips/install.sh b/src/immuadmin-fips/install.sh index 9f34c0dda..df8c36b85 100755 --- a/src/immuadmin-fips/install.sh +++ b/src/immuadmin-fips/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='codenotary/immudb' --option binaryNames='immuadmin' --option version="$VERSION" --option assetRegex='.*(fips).*' - + echo 'Done!' diff --git a/src/immuadmin-fips/library_scripts.sh b/src/immuadmin-fips/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/immuadmin-fips/library_scripts.sh +++ b/src/immuadmin-fips/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly 
and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/immuadmin/install.sh b/src/immuadmin/install.sh index 189655e7b..8a8cc5340 100755 --- a/src/immuadmin/install.sh +++ b/src/immuadmin/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='codenotary/immudb' --option binaryNames='immuadmin' --option version="$VERSION" --option assetRegex='^(?!.*(fips))' - + echo 'Done!' diff --git a/src/immuadmin/library_scripts.sh b/src/immuadmin/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/immuadmin/library_scripts.sh +++ b/src/immuadmin/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/immuclient-fips/install.sh b/src/immuclient-fips/install.sh index 4fed3f26d..66094326c 100755 --- a/src/immuclient-fips/install.sh +++ b/src/immuclient-fips/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='codenotary/immudb' --option binaryNames='immuclient' --option version="$VERSION" --option assetRegex='.*(fips).*' - + echo 'Done!' diff --git a/src/immuclient-fips/library_scripts.sh b/src/immuclient-fips/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/immuclient-fips/library_scripts.sh +++ b/src/immuclient-fips/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux 
architectures yet: $(uname -m)" diff --git a/src/immuclient/install.sh b/src/immuclient/install.sh index a6781bb06..a20273d49 100755 --- a/src/immuclient/install.sh +++ b/src/immuclient/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='codenotary/immudb' --option binaryNames='immuclient' --option version="$VERSION" --option assetRegex='^(?!.*(fips))' - + echo 'Done!' diff --git a/src/immuclient/library_scripts.sh b/src/immuclient/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/immuclient/library_scripts.sh +++ b/src/immuclient/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/immudb-fips/install.sh b/src/immudb-fips/install.sh index 675cc0e67..0be56799e 100755 --- a/src/immudb-fips/install.sh +++ b/src/immudb-fips/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ 
--option repo='codenotary/immudb' --option binaryNames='immudb' --option version="$VERSION" --option assetRegex='.*(fips).*' - + echo 'Done!' diff --git a/src/immudb-fips/library_scripts.sh b/src/immudb-fips/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/immudb-fips/library_scripts.sh +++ b/src/immudb-fips/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/immudb/install.sh 
b/src/immudb/install.sh index 4146be683..8b69162ab 100755 --- a/src/immudb/install.sh +++ b/src/immudb/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='codenotary/immudb' --option binaryNames='immudb' --option version="$VERSION" --option assetRegex='^(?!.*(fips))' - + echo 'Done!' diff --git a/src/immudb/library_scripts.sh b/src/immudb/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/immudb/library_scripts.sh +++ b/src/immudb/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it 
temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/infracost/install.sh b/src/infracost/install.sh index 44c39ea1f..71004d63e 100755 --- a/src/infracost/install.sh +++ b/src/infracost/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='infracost/infracost' --option binaryNames='infracost' --option version="$VERSION" - + echo 'Done!' diff --git a/src/infracost/library_scripts.sh b/src/infracost/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/infracost/library_scripts.sh +++ b/src/infracost/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/infrastructor-sdkman/install.sh b/src/infrastructor-sdkman/install.sh index 54f92776b..282ee77ee 100755 --- a/src/infrastructor-sdkman/install.sh +++ b/src/infrastructor-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/infrastructor-sdkman/library_scripts.sh 
b/src/infrastructor-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/infrastructor-sdkman/library_scripts.sh +++ b/src/infrastructor-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/invoke/install.sh b/src/invoke/install.sh index 2468d0828..442f49513 100755 --- a/src/invoke/install.sh +++ b/src/invoke/install.sh @@ 
-5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='invoke' --option version="$VERSION" - + echo 'Done!' diff --git a/src/invoke/library_scripts.sh b/src/invoke/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/invoke/library_scripts.sh +++ b/src/invoke/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ 
ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ionic-cli/install.sh b/src/ionic-cli/install.sh index fb5d0fba6..2c730e520 100755 --- a/src/ionic-cli/install.sh +++ b/src/ionic-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='@ionic/cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/ionic-cli/library_scripts.sh b/src/ionic-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/ionic-cli/library_scripts.sh +++ b/src/ionic-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/isort/install.sh b/src/isort/install.sh index 613ec893b..cb182b39e 100755 --- a/src/isort/install.sh +++ b/src/isort/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='isort' 
--option version="$VERSION" - + echo 'Done!' diff --git a/src/isort/library_scripts.sh b/src/isort/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/isort/library_scripts.sh +++ b/src/isort/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/istioctl/install.sh b/src/istioctl/install.sh index 21aebb816..930a8eb5a 100755 --- a/src/istioctl/install.sh +++ b/src/istioctl/install.sh @@ -5,8 +5,8 @@ set 
-e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='istio/istio' --option binaryNames='istioctl' --option version="$VERSION" - + echo 'Done!' diff --git a/src/istioctl/library_scripts.sh b/src/istioctl/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/istioctl/library_scripts.sh +++ b/src/istioctl/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; 
then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jake/install.sh b/src/jake/install.sh index 20ba1e388..08022761e 100755 --- a/src/jake/install.sh +++ b/src/jake/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='jake' --option version="$VERSION" - + echo 'Done' diff --git a/src/jake/library_scripts.sh b/src/jake/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/jake/library_scripts.sh +++ b/src/jake/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jbake-sdkman/install.sh b/src/jbake-sdkman/install.sh index e6f0ecbfb..45de9e138 100755 --- a/src/jbake-sdkman/install.sh +++ b/src/jbake-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/jbake-sdkman/library_scripts.sh b/src/jbake-sdkman/library_scripts.sh index 
8f9bd9a0e..5384c58d6 100644 --- a/src/jbake-sdkman/library_scripts.sh +++ b/src/jbake-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jbang-sdkman/install.sh b/src/jbang-sdkman/install.sh index 579888f7c..d68dc1f6c 100755 --- a/src/jbang-sdkman/install.sh +++ b/src/jbang-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # 
nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/jbang-sdkman/library_scripts.sh b/src/jbang-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/jbang-sdkman/library_scripts.sh +++ b/src/jbang-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ 
ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jenkinsx-cli/install.sh b/src/jenkinsx-cli/install.sh index 08c7ecc70..39977b35e 100755 --- a/src/jenkinsx-cli/install.sh +++ b/src/jenkinsx-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='jenkins-x/jx' --option binaryNames='jx' --option version="$VERSION" - + echo 'Done!' diff --git a/src/jenkinsx-cli/library_scripts.sh b/src/jenkinsx-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/jenkinsx-cli/library_scripts.sh +++ b/src/jenkinsx-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jest/install.sh b/src/jest/install.sh index b7de2b7d4..3fdc653d9 100755 --- a/src/jest/install.sh +++ b/src/jest/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='jest' 
--option version="$VERSION" - + echo 'Done!' diff --git a/src/jest/library_scripts.sh b/src/jest/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/jest/library_scripts.sh +++ b/src/jest/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jfrog-cli-homebrew/install.sh b/src/jfrog-cli-homebrew/install.sh index 572773447..da5d43a57 100755 --- a/src/jfrog-cli-homebrew/install.sh +++ 
b/src/jfrog-cli-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/jfrog-cli-homebrew/library_scripts.sh b/src/jfrog-cli-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/jfrog-cli-homebrew/library_scripts.sh +++ b/src/jfrog-cli-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ 
ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jfrog-cli-npm/install.sh b/src/jfrog-cli-npm/install.sh index 03519d33d..72fec67bc 100755 --- a/src/jfrog-cli-npm/install.sh +++ b/src/jfrog-cli-npm/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='jfrog-cli-v2-jf' --option version="$VERSION" - + echo 'Done!' diff --git a/src/jfrog-cli-npm/library_scripts.sh b/src/jfrog-cli-npm/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/jfrog-cli-npm/library_scripts.sh +++ b/src/jfrog-cli-npm/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jfrog-cli/install.sh b/src/jfrog-cli/install.sh index e97c1b1e1..1082c7790 100755 --- a/src/jfrog-cli/install.sh +++ b/src/jfrog-cli/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" @@ -16,7 +16,7 @@ $nanolayer_location \ install \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/curl-apt-get:1.0.7" \ - + curl -fL https://install-cli.jfrog.io | sh && chown $_REMOTE_USER /usr/local/bin/jf && chmod a+x /usr/local/bin/jf diff --git a/src/jfrog-cli/library_scripts.sh b/src/jfrog-cli/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/jfrog-cli/library_scripts.sh +++ b/src/jfrog-cli/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for 
non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jira-cli/install.sh b/src/jira-cli/install.sh index c4948b6b5..05fb10e63 100755 --- a/src/jira-cli/install.sh +++ b/src/jira-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='ankitpokhrel/jira-cli' --option binaryNames='jira' --option version="$VERSION" - + echo 'Done!' diff --git a/src/jira-cli/library_scripts.sh b/src/jira-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/jira-cli/library_scripts.sh +++ b/src/jira-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of 
the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jmc-sdkman/install.sh b/src/jmc-sdkman/install.sh index 4f40f1bfa..b7d730b96 100755 --- a/src/jmc-sdkman/install.sh +++ b/src/jmc-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/jmc-sdkman/library_scripts.sh b/src/jmc-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/jmc-sdkman/library_scripts.sh +++ b/src/jmc-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jmeter-sdkman/install.sh b/src/jmeter-sdkman/install.sh index 13493ea77..93d6c32eb 100755 --- a/src/jmeter-sdkman/install.sh +++ b/src/jmeter-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git 
a/src/jmeter-sdkman/library_scripts.sh b/src/jmeter-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/jmeter-sdkman/library_scripts.sh +++ b/src/jmeter-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/joern-sdkman/install.sh b/src/joern-sdkman/install.sh index 26fce022a..3a22966e8 100755 --- 
a/src/joern-sdkman/install.sh +++ b/src/joern-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/joern-sdkman/library_scripts.sh b/src/joern-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/joern-sdkman/library_scripts.sh +++ b/src/joern-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ 
ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jreleaser-sdkman/install.sh b/src/jreleaser-sdkman/install.sh index a4437f430..72db3d2f9 100755 --- a/src/jreleaser-sdkman/install.sh +++ b/src/jreleaser-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/jreleaser-sdkman/library_scripts.sh b/src/jreleaser-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/jreleaser-sdkman/library_scripts.sh +++ b/src/jreleaser-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jrnl/install.sh b/src/jrnl/install.sh index 3824197d4..2a455ad4f 100755 --- a/src/jrnl/install.sh +++ b/src/jrnl/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='jrnl' --option version="$VERSION" - + echo 'Done!' diff --git a/src/jrnl/library_scripts.sh b/src/jrnl/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/jrnl/library_scripts.sh +++ b/src/jrnl/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jshint/install.sh b/src/jshint/install.sh index a4e77c4e1..f8b6e71d6 100755 --- 
a/src/jshint/install.sh +++ b/src/jshint/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='jshint' --option version="$VERSION" - + echo 'Done!' diff --git a/src/jshint/library_scripts.sh b/src/jshint/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/jshint/library_scripts.sh +++ b/src/jshint/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ 
"$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jsii-diff/install.sh b/src/jsii-diff/install.sh index 9e9636c45..e9f131285 100755 --- a/src/jsii-diff/install.sh +++ b/src/jsii-diff/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='jsii-diff' --option version="$VERSION" - + echo 'Done!' diff --git a/src/jsii-diff/library_scripts.sh b/src/jsii-diff/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/jsii-diff/library_scripts.sh +++ b/src/jsii-diff/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jsii-pacmak/install.sh b/src/jsii-pacmak/install.sh index 19f44a183..cc35a0255 100755 --- a/src/jsii-pacmak/install.sh +++ b/src/jsii-pacmak/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ 
--option package='jsii-pacmak' --option version="$VERSION" - + echo 'Done!' diff --git a/src/jsii-pacmak/library_scripts.sh b/src/jsii-pacmak/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/jsii-pacmak/library_scripts.sh +++ b/src/jsii-pacmak/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jsii-rosetta/install.sh b/src/jsii-rosetta/install.sh index b4ef51e05..1b4618983 100755 --- 
a/src/jsii-rosetta/install.sh +++ b/src/jsii-rosetta/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='jsii-rosetta' --option version="$VERSION" - + echo 'Done!' diff --git a/src/jsii-rosetta/library_scripts.sh b/src/jsii-rosetta/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/jsii-rosetta/library_scripts.sh +++ b/src/jsii-rosetta/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ 
"$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/jsii/install.sh b/src/jsii/install.sh index 8e624b7f1..06c74998c 100755 --- a/src/jsii/install.sh +++ b/src/jsii/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='jsii' --option version="$VERSION" - + echo 'Done!' diff --git a/src/jsii/library_scripts.sh b/src/jsii/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/jsii/library_scripts.sh +++ b/src/jsii/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/json-server/install.sh b/src/json-server/install.sh index 8c8faba89..fe80e9bf5 100755 --- a/src/json-server/install.sh +++ b/src/json-server/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ 
--option package='json-server' --option version="$VERSION" - + echo 'Done!' diff --git a/src/json-server/library_scripts.sh b/src/json-server/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/json-server/library_scripts.sh +++ b/src/json-server/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/k2tf/install.sh b/src/k2tf/install.sh index 30eaa50c1..36f4f95bf 100755 --- a/src/k2tf/install.sh +++ 
b/src/k2tf/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='sl1pm4t/k2tf' --option binaryNames='k2tf' --option version="$VERSION" - + echo 'Done!' diff --git a/src/k2tf/library_scripts.sh b/src/k2tf/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/k2tf/library_scripts.sh +++ b/src/k2tf/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname 
-sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/k6/install.sh b/src/k6/install.sh index 2734f8d18..78e1c604b 100755 --- a/src/k6/install.sh +++ b/src/k6/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='grafana/k6' --option binaryNames='k6' --option version="$VERSION" - + echo 'Done!' diff --git a/src/k6/library_scripts.sh b/src/k6/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/k6/library_scripts.sh +++ b/src/k6/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/karaf-sdkman/install.sh b/src/karaf-sdkman/install.sh index b716c66d1..355873cf5 100755 --- a/src/karaf-sdkman/install.sh +++ b/src/karaf-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/karaf-sdkman/library_scripts.sh b/src/karaf-sdkman/library_scripts.sh index 
8f9bd9a0e..5384c58d6 100644 --- a/src/karaf-sdkman/library_scripts.sh +++ b/src/karaf-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/keepercommander/install.sh b/src/keepercommander/install.sh index f2a9a3faf..adcbec25c 100755 --- a/src/keepercommander/install.sh +++ b/src/keepercommander/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer 
is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='keepercommander' --option version="$VERSION" - + echo 'Done!' diff --git a/src/keepercommander/library_scripts.sh b/src/keepercommander/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/keepercommander/library_scripts.sh +++ b/src/keepercommander/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ 
-141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ki-sdkman/install.sh b/src/ki-sdkman/install.sh index 02a8e1b42..30c3ef66b 100755 --- a/src/ki-sdkman/install.sh +++ b/src/ki-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/ki-sdkman/library_scripts.sh b/src/ki-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/ki-sdkman/library_scripts.sh +++ b/src/ki-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/kind/install.sh b/src/kind/install.sh index 81de4b1af..6c8f4b3dc 100755 --- a/src/kind/install.sh +++ b/src/kind/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='kubernetes-sigs/kind' --option binaryNames='kind' --option version="$VERSION" - + echo 'Done!' diff --git a/src/kind/library_scripts.sh b/src/kind/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/kind/library_scripts.sh +++ b/src/kind/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/kobweb-sdkman/install.sh 
b/src/kobweb-sdkman/install.sh index 21cc8d2d8..a3fcf44de 100755 --- a/src/kobweb-sdkman/install.sh +++ b/src/kobweb-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/kobweb-sdkman/library_scripts.sh b/src/kobweb-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/kobweb-sdkman/library_scripts.sh +++ b/src/kobweb-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || 
[ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/kops/install.sh b/src/kops/install.sh index 4c258742d..5bf7ffa78 100755 --- a/src/kops/install.sh +++ b/src/kops/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='kubernetes/kops' --option binaryNames='kops' --option version="$VERSION" - + echo 'Done!' diff --git a/src/kops/library_scripts.sh b/src/kops/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/kops/library_scripts.sh +++ b/src/kops/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/kotlin-sdkman/install.sh b/src/kotlin-sdkman/install.sh index edb1702f4..ddd87b3db 100755 --- a/src/kotlin-sdkman/install.sh +++ b/src/kotlin-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/kotlin-sdkman/library_scripts.sh b/src/kotlin-sdkman/library_scripts.sh index 
8f9bd9a0e..5384c58d6 100644 --- a/src/kotlin-sdkman/library_scripts.sh +++ b/src/kotlin-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/kscript-sdkman/install.sh b/src/kscript-sdkman/install.sh index 253c01203..94b9491a5 100755 --- a/src/kscript-sdkman/install.sh +++ b/src/kscript-sdkman/install.sh @@ -6,8 +6,8 @@ source 
./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/kscript-sdkman/library_scripts.sh b/src/kscript-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/kscript-sdkman/library_scripts.sh +++ b/src/kscript-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl 
else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/kubeclarity-cli/install.sh b/src/kubeclarity-cli/install.sh index ee70558a0..7e5f14b4a 100755 --- a/src/kubeclarity-cli/install.sh +++ b/src/kubeclarity-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='openclarity/kubeclarity' --option binaryNames='kubeclarity-cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/kubeclarity-cli/library_scripts.sh b/src/kubeclarity-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/kubeclarity-cli/library_scripts.sh +++ b/src/kubeclarity-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/kubectl-asdf/install.sh b/src/kubectl-asdf/install.sh index 8829794e0..98cbbfab6 100755 --- a/src/kubectl-asdf/install.sh +++ b/src/kubectl-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='kubectl' --option version="$VERSION" - + echo 'Done!' diff --git a/src/kubectl-asdf/library_scripts.sh b/src/kubectl-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/kubectl-asdf/library_scripts.sh +++ b/src/kubectl-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: 
$(uname -m)" diff --git a/src/kubectx-kubens/install.sh b/src/kubectx-kubens/install.sh index 015cd3719..0aa1082ae 100755 --- a/src/kubectx-kubens/install.sh +++ b/src/kubectx-kubens/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='ahmetb/kubectx' --option binaryNames='kubectx' --option assetRegex='kubectx.*' --option version="$VERSION" - + $nanolayer_location \ @@ -24,7 +24,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='ahmetb/kubectx' --option binaryNames='kubens' --option assetRegex='kubens.*' --option version="$VERSION" - + echo 'Done!' diff --git a/src/kubectx-kubens/library_scripts.sh b/src/kubectx-kubens/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/kubectx-kubens/library_scripts.sh +++ b/src/kubectx-kubens/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/kyverno-cli/install.sh b/src/kyverno-cli/install.sh index 5f6eb0fc1..ced7b8301 100755 --- a/src/kyverno-cli/install.sh +++ b/src/kyverno-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ 
--option repo='kyverno/kyverno' --option binaryNames='kyverno' --option version="$VERSION" - + echo 'Done!' diff --git a/src/kyverno-cli/library_scripts.sh b/src/kyverno-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/kyverno-cli/library_scripts.sh +++ b/src/kyverno-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/lastpass-cli-homebrew/install.sh b/src/lastpass-cli-homebrew/install.sh 
index 3dbb08324..c2c0366f3 100755 --- a/src/lastpass-cli-homebrew/install.sh +++ b/src/lastpass-cli-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/lastpass-cli-homebrew/library_scripts.sh b/src/lastpass-cli-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/lastpass-cli-homebrew/library_scripts.sh +++ b/src/lastpass-cli-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == 
"Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/layrry-sdkman/install.sh b/src/layrry-sdkman/install.sh index e8ac05ec3..d1864f071 100755 --- a/src/layrry-sdkman/install.sh +++ b/src/layrry-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/layrry-sdkman/library_scripts.sh b/src/layrry-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/layrry-sdkman/library_scripts.sh +++ b/src/layrry-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/lean-asdf/install.sh b/src/lean-asdf/install.sh index 66af87137..9f45bb4fe 100755 --- a/src/lean-asdf/install.sh +++ b/src/lean-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ 
devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='lean' --option version="$VERSION" - + echo 'Done!' diff --git a/src/lean-asdf/library_scripts.sh b/src/lean-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/lean-asdf/library_scripts.sh +++ b/src/lean-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures 
yet: $(uname -m)" diff --git a/src/lefthook-asdf/install.sh b/src/lefthook-asdf/install.sh index e4f4a7aca..0cc564e7c 100755 --- a/src/lefthook-asdf/install.sh +++ b/src/lefthook-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='lefthook' --option version="$VERSION" - + echo 'Done!' diff --git a/src/lefthook-asdf/library_scripts.sh b/src/lefthook-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/lefthook-asdf/library_scripts.sh +++ b/src/lefthook-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse 
installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/leiningen-sdkman/install.sh b/src/leiningen-sdkman/install.sh index 9aefa55bc..2a2d25af6 100755 --- a/src/leiningen-sdkman/install.sh +++ b/src/leiningen-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/leiningen-sdkman/library_scripts.sh b/src/leiningen-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/leiningen-sdkman/library_scripts.sh +++ b/src/leiningen-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/lektor/install.sh b/src/lektor/install.sh index a0a0ee8d4..40134c877 100755 --- a/src/lektor/install.sh +++ b/src/lektor/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='lektor' --option version="$VERSION" - + echo 'Done!' diff --git a/src/lektor/library_scripts.sh b/src/lektor/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/lektor/library_scripts.sh +++ b/src/lektor/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/lerna-npm/install.sh b/src/lerna-npm/install.sh index 
0bfa75071..5c8dbcc6a 100755 --- a/src/lerna-npm/install.sh +++ b/src/lerna-npm/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='lerna' --option version="$VERSION" - + echo 'Done!' diff --git a/src/lerna-npm/library_scripts.sh b/src/lerna-npm/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/lerna-npm/library_scripts.sh +++ b/src/lerna-npm/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then 
if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/less/install.sh b/src/less/install.sh index 490c2b9b5..90a4887e5 100755 --- a/src/less/install.sh +++ b/src/less/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='less' --option version="$VERSION" - + echo 'Done!' diff --git a/src/less/library_scripts.sh b/src/less/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/less/library_scripts.sh +++ b/src/less/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/levant-asdf/install.sh b/src/levant-asdf/install.sh index 5523a1ff2..5ca73b964 100755 --- a/src/levant-asdf/install.sh +++ b/src/levant-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='levant' --option version="$VERSION" - + echo 'Done!' diff --git a/src/levant-asdf/library_scripts.sh b/src/levant-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/levant-asdf/library_scripts.sh +++ b/src/levant-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname 
-m)" diff --git a/src/lighthouse-cli/install.sh b/src/lighthouse-cli/install.sh index 29cf82eb0..6c272f7d1 100755 --- a/src/lighthouse-cli/install.sh +++ b/src/lighthouse-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='lighthouse' --option version="$VERSION" - + echo 'Done!' diff --git a/src/lighthouse-cli/library_scripts.sh b/src/lighthouse-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/lighthouse-cli/library_scripts.sh +++ b/src/lighthouse-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse 
installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/linkerd2-cli-edge/install.sh b/src/linkerd2-cli-edge/install.sh index a5edd3908..ccc540a75 100755 --- a/src/linkerd2-cli-edge/install.sh +++ b/src/linkerd2-cli-edge/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='linkerd/linkerd2' --option binaryNames='linkerd' --option releaseTagRegex='.*edge.*' --option version="$VERSION" - + echo 'Done!' diff --git a/src/linkerd2-cli-edge/library_scripts.sh b/src/linkerd2-cli-edge/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/linkerd2-cli-edge/library_scripts.sh +++ b/src/linkerd2-cli-edge/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/linkerd2-cli-stable/install.sh b/src/linkerd2-cli-stable/install.sh index 4dab201e5..4851f2a98 100755 --- a/src/linkerd2-cli-stable/install.sh +++ b/src/linkerd2-cli-stable/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='linkerd/linkerd2' --option binaryNames='linkerd' --option releaseTagRegex='.*stable.*' --option version="$VERSION" - + echo 'Done!' diff --git a/src/linkerd2-cli-stable/library_scripts.sh b/src/linkerd2-cli-stable/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/linkerd2-cli-stable/library_scripts.sh +++ b/src/linkerd2-cli-stable/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for 
non-x86-linux architectures yet: $(uname -m)" diff --git a/src/linode-cli/install.sh b/src/linode-cli/install.sh index b8285a1db..e964fdf91 100755 --- a/src/linode-cli/install.sh +++ b/src/linode-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='linode-cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/linode-cli/library_scripts.sh b/src/linode-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/linode-cli/library_scripts.sh +++ b/src/linode-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not 
previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/lite-server/install.sh b/src/lite-server/install.sh index 7ee7afc59..e6269c080 100755 --- a/src/lite-server/install.sh +++ b/src/lite-server/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='lite-server' --option version="$VERSION" - + echo 'Done!' diff --git a/src/lite-server/library_scripts.sh b/src/lite-server/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/lite-server/library_scripts.sh +++ b/src/lite-server/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/live-server/install.sh b/src/live-server/install.sh index 4b8cf59f6..3904224e8 100755 --- a/src/live-server/install.sh +++ b/src/live-server/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ 
--option package='live-server' --option version="$VERSION" - + echo 'Done!' diff --git a/src/live-server/library_scripts.sh b/src/live-server/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/live-server/library_scripts.sh +++ b/src/live-server/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/localstack/library_scripts.sh b/src/localstack/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- 
a/src/localstack/library_scripts.sh +++ b/src/localstack/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/localtunnel-npm/install.sh b/src/localtunnel-npm/install.sh index 748c48427..fea73ac9f 100755 --- a/src/localtunnel-npm/install.sh +++ b/src/localtunnel-npm/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: 
https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='localtunnel' --option version="$VERSION" - + echo 'Done!' diff --git a/src/localtunnel-npm/library_scripts.sh b/src/localtunnel-npm/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/localtunnel-npm/library_scripts.sh +++ b/src/localtunnel-npm/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; 
then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/mackup/install.sh b/src/mackup/install.sh index 11406d8e3..be83d5ce9 100755 --- a/src/mackup/install.sh +++ b/src/mackup/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='mackup' --option version="$VERSION" - + echo 'Done!' diff --git a/src/mackup/library_scripts.sh b/src/mackup/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/mackup/library_scripts.sh +++ b/src/mackup/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/markdownlint-cli/install.sh b/src/markdownlint-cli/install.sh index bbc362cb1..48dce9b29 100755 --- a/src/markdownlint-cli/install.sh +++ b/src/markdownlint-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='markdownlint-cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/markdownlint-cli/library_scripts.sh b/src/markdownlint-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/markdownlint-cli/library_scripts.sh +++ b/src/markdownlint-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git 
a/src/markdownlint-cli2/install.sh b/src/markdownlint-cli2/install.sh index f7959821d..81709946b 100755 --- a/src/markdownlint-cli2/install.sh +++ b/src/markdownlint-cli2/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='markdownlint-cli2' --option version="$VERSION" - + echo 'Done!' diff --git a/src/markdownlint-cli2/library_scripts.sh b/src/markdownlint-cli2/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/markdownlint-cli2/library_scripts.sh +++ b/src/markdownlint-cli2/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # 
If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/maven-sdkman/install.sh b/src/maven-sdkman/install.sh index dbf90655d..c10770e10 100755 --- a/src/maven-sdkman/install.sh +++ b/src/maven-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/maven-sdkman/library_scripts.sh b/src/maven-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/maven-sdkman/library_scripts.sh +++ b/src/maven-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/meltano/install.sh b/src/meltano/install.sh index 5faf5444d..23c4c3796 100755 --- a/src/meltano/install.sh +++ b/src/meltano/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='meltano' --option version="$VERSION" - + echo 'Done!' diff --git a/src/meltano/library_scripts.sh b/src/meltano/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/meltano/library_scripts.sh +++ b/src/meltano/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/memcached-exporter/install.sh b/src/memcached-exporter/install.sh 
index 467e531fd..c6c334b08 100755 --- a/src/memcached-exporter/install.sh +++ b/src/memcached-exporter/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='prometheus/memcached_exporter' --option binaryNames='memcached_exporter' --option version="$VERSION" - + echo 'Done!' diff --git a/src/memcached-exporter/library_scripts.sh b/src/memcached-exporter/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/memcached-exporter/library_scripts.sh +++ b/src/memcached-exporter/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not 
previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/micro/install.sh b/src/micro/install.sh index cb1963517..ae2fe75ef 100755 --- a/src/micro/install.sh +++ b/src/micro/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/micro/library_scripts.sh b/src/micro/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/micro/library_scripts.sh +++ b/src/micro/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/micronaut-sdkman/install.sh b/src/micronaut-sdkman/install.sh index 75ee884a3..f88bb1c0d 100755 --- a/src/micronaut-sdkman/install.sh +++ b/src/micronaut-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git 
a/src/micronaut-sdkman/library_scripts.sh b/src/micronaut-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/micronaut-sdkman/library_scripts.sh +++ b/src/micronaut-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/mitmproxy/install.sh b/src/mitmproxy/install.sh index 78010a704..db94aebfe 100755 --- 
a/src/mitmproxy/install.sh +++ b/src/mitmproxy/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='mitmproxy' --option version="$VERSION" - + echo 'Done!' diff --git a/src/mitmproxy/library_scripts.sh b/src/mitmproxy/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/mitmproxy/library_scripts.sh +++ b/src/mitmproxy/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux 
x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/mkcert/install.sh b/src/mkcert/install.sh index 639acd9fa..dd05342e4 100755 --- a/src/mkcert/install.sh +++ b/src/mkcert/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='FiloSottile/mkcert' --option binaryNames='mkcert' --option version="$VERSION" - + echo 'Done!' diff --git a/src/mkcert/library_scripts.sh b/src/mkcert/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/mkcert/library_scripts.sh +++ b/src/mkcert/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/mkdocs/install.sh b/src/mkdocs/install.sh index f4d44a742..f6ddcd330 100755 --- a/src/mkdocs/install.sh +++ b/src/mkdocs/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option 
package='mkdocs' --option injections="$PLUGINS" --option version="$VERSION" - + echo 'Done!' diff --git a/src/mkdocs/library_scripts.sh b/src/mkdocs/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/mkdocs/library_scripts.sh +++ b/src/mkdocs/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/mlocate-apt-get/install.sh b/src/mlocate-apt-get/install.sh index 0b030c769..5408f623a 100755 --- 
a/src/mlocate-apt-get/install.sh +++ b/src/mlocate-apt-get/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/mlocate-apt-get/library_scripts.sh b/src/mlocate-apt-get/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/mlocate-apt-get/library_scripts.sh +++ b/src/mlocate-apt-get/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 
+148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/mlton-asdf/install.sh b/src/mlton-asdf/install.sh index 50b2ed51b..10cad809a 100755 --- a/src/mlton-asdf/install.sh +++ b/src/mlton-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='mlton' --option version="$VERSION" - + echo 'Done!' diff --git a/src/mlton-asdf/library_scripts.sh b/src/mlton-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/mlton-asdf/library_scripts.sh +++ b/src/mlton-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/mocha/install.sh b/src/mocha/install.sh index e4490be0d..21eb74123 100755 --- a/src/mocha/install.sh +++ b/src/mocha/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='mocha' --option version="$VERSION" - + echo 'Done!' diff --git a/src/mocha/library_scripts.sh b/src/mocha/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/mocha/library_scripts.sh +++ b/src/mocha/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/mongodb-atlas-cli-homebrew/install.sh 
b/src/mongodb-atlas-cli-homebrew/install.sh index dc347293f..61c4c0bce 100755 --- a/src/mongodb-atlas-cli-homebrew/install.sh +++ b/src/mongodb-atlas-cli-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/mongodb-atlas-cli-homebrew/library_scripts.sh b/src/mongodb-atlas-cli-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/mongodb-atlas-cli-homebrew/library_scripts.sh +++ b/src/mongodb-atlas-cli-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the 
script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/mongosh-homebrew/install.sh b/src/mongosh-homebrew/install.sh index 72455e7a5..b8d6d2c23 100755 --- a/src/mongosh-homebrew/install.sh +++ b/src/mongosh-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/mongosh-homebrew/library_scripts.sh b/src/mongosh-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/mongosh-homebrew/library_scripts.sh +++ b/src/mongosh-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/mosh-apt-get/install.sh b/src/mosh-apt-get/install.sh index 8a23f1e0d..93234e322 100755 --- a/src/mosh-apt-get/install.sh +++ b/src/mosh-apt-get/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git 
a/src/mosh-apt-get/library_scripts.sh b/src/mosh-apt-get/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/mosh-apt-get/library_scripts.sh +++ b/src/mosh-apt-get/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/mosh-homebrew/install.sh b/src/mosh-homebrew/install.sh index 568653f9b..d0b5a6390 100755 --- 
a/src/mosh-homebrew/install.sh +++ b/src/mosh-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/mosh-homebrew/library_scripts.sh b/src/mosh-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/mosh-homebrew/library_scripts.sh +++ b/src/mosh-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ 
ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/mulefd-sdkman/install.sh b/src/mulefd-sdkman/install.sh index 7e3ff9a19..fa2080ae3 100755 --- a/src/mulefd-sdkman/install.sh +++ b/src/mulefd-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/mulefd-sdkman/library_scripts.sh b/src/mulefd-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/mulefd-sdkman/library_scripts.sh +++ b/src/mulefd-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/mvnd-sdkman/install.sh b/src/mvnd-sdkman/install.sh index 98f7664e1..85cfb9b47 100755 --- a/src/mvnd-sdkman/install.sh +++ b/src/mvnd-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git 
a/src/mvnd-sdkman/library_scripts.sh b/src/mvnd-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/mvnd-sdkman/library_scripts.sh +++ b/src/mvnd-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/mybatis-sdkman/install.sh b/src/mybatis-sdkman/install.sh index ef47ec7be..8fe2f456a 100755 --- 
a/src/mybatis-sdkman/install.sh +++ b/src/mybatis-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/mybatis-sdkman/library_scripts.sh b/src/mybatis-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/mybatis-sdkman/library_scripts.sh +++ b/src/mybatis-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 
@@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/mypy/install.sh b/src/mypy/install.sh index 0da8656d3..7d4d11cb3 100755 --- a/src/mypy/install.sh +++ b/src/mypy/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='mypy' --option version="$VERSION" - + echo 'Done!' diff --git a/src/mypy/library_scripts.sh b/src/mypy/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/mypy/library_scripts.sh +++ b/src/mypy/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/mysql-homebrew/install.sh b/src/mysql-homebrew/install.sh index c06a0d9cb..46673c73b 100755 --- a/src/mysql-homebrew/install.sh +++ b/src/mysql-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/mysql-homebrew/library_scripts.sh b/src/mysql-homebrew/library_scripts.sh 
index 8f9bd9a0e..5384c58d6 100644 --- a/src/mysql-homebrew/library_scripts.sh +++ b/src/mysql-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/mysqld-exporter/install.sh b/src/mysqld-exporter/install.sh index 71c707a30..ceb3346d5 100755 --- a/src/mysqld-exporter/install.sh +++ b/src/mysqld-exporter/install.sh @@ -5,8 +5,8 @@ set -e # 
nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='prometheus/mysqld_exporter' --option binaryNames='mysqld_exporter' --option version="$VERSION" - + echo 'Done!' diff --git a/src/mysqld-exporter/library_scripts.sh b/src/mysqld-exporter/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/mysqld-exporter/library_scripts.sh +++ b/src/mysqld-exporter/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux 
x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/n8n/install.sh b/src/n8n/install.sh index 707f3b021..0d7cbc5fe 100755 --- a/src/n8n/install.sh +++ b/src/n8n/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='n8n' --option version="$VERSION" - + echo 'Done!' diff --git a/src/n8n/library_scripts.sh b/src/n8n/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/n8n/library_scripts.sh +++ b/src/n8n/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/nancy/install.sh b/src/nancy/install.sh index 2429f27ca..32888b283 100755 --- a/src/nancy/install.sh +++ b/src/nancy/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option 
repo='sonatype-nexus-community/nancy' --option binaryNames='nancy' --option version="$VERSION" --option assetRegex='.*(.tar.gz)$' - + echo 'Done!' diff --git a/src/nancy/library_scripts.sh b/src/nancy/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/nancy/library_scripts.sh +++ b/src/nancy/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/navi/install.sh b/src/navi/install.sh index 
96887afb0..763c5c3f6 100755 --- a/src/navi/install.sh +++ b/src/navi/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='denisidoro/navi' --option binaryNames='navi' --option version="$VERSION" - + echo 'Done!' diff --git a/src/navi/library_scripts.sh b/src/navi/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/navi/library_scripts.sh +++ b/src/navi/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; 
then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ncdu/install.sh b/src/ncdu/install.sh index 8b5a42bb6..faabfe4c6 100755 --- a/src/ncdu/install.sh +++ b/src/ncdu/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/ncdu/library_scripts.sh b/src/ncdu/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/ncdu/library_scripts.sh +++ b/src/ncdu/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/neko-asdf/install.sh b/src/neko-asdf/install.sh index a18c1c5e2..7b52d4c34 100755 --- a/src/neko-asdf/install.sh +++ b/src/neko-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ 
devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='neko' --option version="$VERSION" - + echo 'Done!' diff --git a/src/neko-asdf/library_scripts.sh b/src/neko-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/neko-asdf/library_scripts.sh +++ b/src/neko-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures 
yet: $(uname -m)" diff --git a/src/neo4jmigrations-sdkman/install.sh b/src/neo4jmigrations-sdkman/install.sh index d3f91a51a..bde21a30e 100755 --- a/src/neo4jmigrations-sdkman/install.sh +++ b/src/neo4jmigrations-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/neo4jmigrations-sdkman/library_scripts.sh b/src/neo4jmigrations-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/neo4jmigrations-sdkman/library_scripts.sh +++ b/src/neo4jmigrations-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it 
temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/neofetch/install.sh b/src/neofetch/install.sh index 2adbf9417..82e4ce88d 100755 --- a/src/neofetch/install.sh +++ b/src/neofetch/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/neofetch/library_scripts.sh b/src/neofetch/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/neofetch/library_scripts.sh +++ b/src/neofetch/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/neovim-apt-get/install.sh b/src/neovim-apt-get/install.sh index 4d7a8ce19..dccb45ff6 100755 --- a/src/neovim-apt-get/install.sh +++ b/src/neovim-apt-get/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.46" @@ -17,7 +17,7 @@ 
$nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/apt-get-packages:1.0.6" \ --option packages='neovim' --option ppas='ppa:neovim-ppa/stable' - + echo 'Done!' diff --git a/src/neovim-apt-get/library_scripts.sh b/src/neovim-apt-get/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/neovim-apt-get/library_scripts.sh +++ b/src/neovim-apt-get/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else 
echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/neovim-homebrew/install.sh b/src/neovim-homebrew/install.sh index 220a6a8ef..c66bc07a1 100755 --- a/src/neovim-homebrew/install.sh +++ b/src/neovim-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/neovim-homebrew/library_scripts.sh b/src/neovim-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/neovim-homebrew/library_scripts.sh +++ b/src/neovim-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download 
it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/nestjs-cli/install.sh b/src/nestjs-cli/install.sh index 9dc086c13..fe85123e9 100755 --- a/src/nestjs-cli/install.sh +++ b/src/nestjs-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='@nestjs/cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/nestjs-cli/library_scripts.sh b/src/nestjs-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/nestjs-cli/library_scripts.sh +++ b/src/nestjs-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/netdata/library_scripts.sh b/src/netdata/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/netdata/library_scripts.sh +++ b/src/netdata/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/netlify-cli/install.sh b/src/netlify-cli/install.sh index 9c7c94ed5..5f8fd72b3 100755 --- a/src/netlify-cli/install.sh +++ b/src/netlify-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ 
devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='netlify-cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/netlify-cli/library_scripts.sh b/src/netlify-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/netlify-cli/library_scripts.sh +++ b/src/netlify-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/nim-asdf/install.sh 
b/src/nim-asdf/install.sh index 94ee99dca..a30f734bd 100755 --- a/src/nim-asdf/install.sh +++ b/src/nim-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='nim' --option version="$VERSION" - + echo 'Done!' diff --git a/src/nim-asdf/library_scripts.sh b/src/nim-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/nim-asdf/library_scripts.sh +++ b/src/nim-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse 
installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ninja-asdf/install.sh b/src/ninja-asdf/install.sh index 42b684fe0..6cadad048 100755 --- a/src/ninja-asdf/install.sh +++ b/src/ninja-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='ninja' --option version="$VERSION" - + echo 'Done!' diff --git a/src/ninja-asdf/library_scripts.sh b/src/ninja-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/ninja-asdf/library_scripts.sh +++ b/src/ninja-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/nmap-apt-get/install.sh b/src/nmap-apt-get/install.sh index e47d4128a..1a99b5035 100755 --- a/src/nmap-apt-get/install.sh +++ b/src/nmap-apt-get/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git 
a/src/nmap-apt-get/library_scripts.sh b/src/nmap-apt-get/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/nmap-apt-get/library_scripts.sh +++ b/src/nmap-apt-get/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/nmap-homebrew/install.sh b/src/nmap-homebrew/install.sh index 9694e89e7..db544ffa3 100755 --- 
a/src/nmap-homebrew/install.sh +++ b/src/nmap-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/nmap-homebrew/library_scripts.sh b/src/nmap-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/nmap-homebrew/library_scripts.sh +++ b/src/nmap-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ 
ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/nnn-apt-get/install.sh b/src/nnn-apt-get/install.sh index 57a9752ed..8bdaa0016 100755 --- a/src/nnn-apt-get/install.sh +++ b/src/nnn-apt-get/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/nnn-apt-get/library_scripts.sh b/src/nnn-apt-get/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/nnn-apt-get/library_scripts.sh +++ b/src/nnn-apt-get/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/nnn-homebrew/install.sh b/src/nnn-homebrew/install.sh index 38ff88bb3..cb4f0c1c5 100755 --- a/src/nnn-homebrew/install.sh +++ b/src/nnn-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git 
a/src/nnn-homebrew/library_scripts.sh b/src/nnn-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/nnn-homebrew/library_scripts.sh +++ b/src/nnn-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/node-asdf/install.sh b/src/node-asdf/install.sh index f633abb4c..3debcda68 100755 --- a/src/node-asdf/install.sh +++ 
b/src/node-asdf/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.8" \ --option plugin='nodejs' --option version="$VERSION" - + echo 'Done!' diff --git a/src/node-asdf/library_scripts.sh b/src/node-asdf/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/node-asdf/library_scripts.sh +++ b/src/node-asdf/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = 
"Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/node-exporter/install.sh b/src/node-exporter/install.sh index c7c216477..c59230d2f 100755 --- a/src/node-exporter/install.sh +++ b/src/node-exporter/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='prometheus/node_exporter' --option binaryNames='node_exporter' --option version="$VERSION" - + echo 'Done!' diff --git a/src/node-exporter/library_scripts.sh b/src/node-exporter/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/node-exporter/library_scripts.sh +++ b/src/node-exporter/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/nomad-asdf/install.sh b/src/nomad-asdf/install.sh index cb7ac5c6c..2d55acd89 100755 --- a/src/nomad-asdf/install.sh +++ b/src/nomad-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='nomad' --option version="$VERSION" - + echo 'Done!' diff --git a/src/nomad-asdf/library_scripts.sh b/src/nomad-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/nomad-asdf/library_scripts.sh +++ b/src/nomad-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" 
diff --git a/src/nox/install.sh b/src/nox/install.sh index 34fe7a82b..c6ee27c0f 100755 --- a/src/nox/install.sh +++ b/src/nox/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='nox' --option version="$VERSION" - + echo 'Done!' diff --git a/src/nox/library_scripts.sh b/src/nox/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/nox/library_scripts.sh +++ b/src/nox/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z 
"${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/npm-package/install.sh b/src/npm-package/install.sh index 83a47d47c..a53ad256e 100755 --- a/src/npm-package/install.sh +++ b/src/npm-package/install.sh @@ -13,7 +13,7 @@ if [ -z "$PACKAGE" ]; then fi if [ "$(id -u)" -ne 0 ]; then - echo -e 'Script must be run as + echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.' exit 1 fi diff --git a/src/nx-npm/install.sh b/src/nx-npm/install.sh index 5d7266f39..e89856ee9 100755 --- a/src/nx-npm/install.sh +++ b/src/nx-npm/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='nx' --option version="$VERSION" - + echo 'Done!' diff --git a/src/nx-npm/library_scripts.sh b/src/nx-npm/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/nx-npm/library_scripts.sh +++ b/src/nx-npm/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ocaml-asdf/install.sh b/src/ocaml-asdf/install.sh index c2b9bcf2d..61e700922 100755 --- a/src/ocaml-asdf/install.sh +++ b/src/ocaml-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='ocaml' --option version="$VERSION" - + echo 'Done!' diff --git a/src/ocaml-asdf/library_scripts.sh b/src/ocaml-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/ocaml-asdf/library_scripts.sh +++ b/src/ocaml-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" 
diff --git a/src/oclif/install.sh b/src/oclif/install.sh index b6c81fed1..8b20806f7 100755 --- a/src/oclif/install.sh +++ b/src/oclif/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='oclif' --option version="$VERSION" - + echo 'Done!' diff --git a/src/oclif/library_scripts.sh b/src/oclif/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/oclif/library_scripts.sh +++ b/src/oclif/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if 
[ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/opa/install.sh b/src/opa/install.sh index 5c2fa3b4e..b45685450 100755 --- a/src/opa/install.sh +++ b/src/opa/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='open-policy-agent/opa' --option binaryNames='opa' --option version="$VERSION" - + echo 'Done!' diff --git a/src/opa/library_scripts.sh b/src/opa/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/opa/library_scripts.sh +++ b/src/opa/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/opam-asdf/install.sh b/src/opam-asdf/install.sh index 77ec7f0ba..fff27346b 100755 --- a/src/opam-asdf/install.sh +++ b/src/opam-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='opam' --option version="$VERSION" - + echo 'Done!' diff --git a/src/opam-asdf/library_scripts.sh b/src/opam-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/opam-asdf/library_scripts.sh +++ b/src/opam-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff 
--git a/src/ory-cli/install.sh b/src/ory-cli/install.sh index 37306ce37..39f85a44d 100755 --- a/src/ory-cli/install.sh +++ b/src/ory-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='ory/cli' --option binaryNames='ory' --option version="$VERSION" --option assetRegex='.*(sqlite).*' - + echo 'Done!' diff --git a/src/ory-cli/library_scripts.sh b/src/ory-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/ory-cli/library_scripts.sh +++ b/src/ory-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse 
installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ory-hydra/install.sh b/src/ory-hydra/install.sh index ff3a6d8ef..7aeebfc34 100755 --- a/src/ory-hydra/install.sh +++ b/src/ory-hydra/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='ory/hydra' --option binaryNames='hydra' --option version="$VERSION" --option assetRegex='.*(sqlite).*' - + echo 'Done!' diff --git a/src/ory-hydra/library_scripts.sh b/src/ory-hydra/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/ory-hydra/library_scripts.sh +++ b/src/ory-hydra/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ory-kratos/install.sh b/src/ory-kratos/install.sh index ff7e036c2..a5c27e591 100755 --- a/src/ory-kratos/install.sh +++ b/src/ory-kratos/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ 
--option repo='ory/kratos' --option binaryNames='kratos' --option version="$VERSION" --option assetRegex='.*(sqlite).*' - + echo 'Done!' diff --git a/src/ory-kratos/library_scripts.sh b/src/ory-kratos/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/ory-kratos/library_scripts.sh +++ b/src/ory-kratos/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ory-oathkeeper/install.sh 
b/src/ory-oathkeeper/install.sh index ec24f7734..238453daf 100755 --- a/src/ory-oathkeeper/install.sh +++ b/src/ory-oathkeeper/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='ory/oathkeeper' --option binaryNames='oathkeeper' --option version="$VERSION" --option assetRegex='.*(sqlite).*' - + echo 'Done!' diff --git a/src/ory-oathkeeper/library_scripts.sh b/src/ory-oathkeeper/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/ory-oathkeeper/library_scripts.sh +++ b/src/ory-oathkeeper/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the 
script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/packer-asdf/install.sh b/src/packer-asdf/install.sh index f7c1c92ba..5bfecd765 100755 --- a/src/packer-asdf/install.sh +++ b/src/packer-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='packer' --option version="$VERSION" - + echo 'Done!' diff --git a/src/packer-asdf/library_scripts.sh b/src/packer-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/packer-asdf/library_scripts.sh +++ b/src/packer-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/pandoc/install.sh b/src/pandoc/install.sh index 8d036239f..ce51495c0 100755 --- a/src/pandoc/install.sh +++ b/src/pandoc/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='jgm/pandoc' --option binaryNames='pandoc' --option version="$VERSION" - + echo 'Done!' diff --git a/src/pandoc/library_scripts.sh b/src/pandoc/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/pandoc/library_scripts.sh +++ b/src/pandoc/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/pass-apt-get/install.sh 
b/src/pass-apt-get/install.sh index bec3c39d7..a4b01d9ed 100755 --- a/src/pass-apt-get/install.sh +++ b/src/pass-apt-get/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/pass-apt-get/library_scripts.sh b/src/pass-apt-get/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/pass-apt-get/library_scripts.sh +++ b/src/pass-apt-get/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ 
"$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/pdm/install.sh b/src/pdm/install.sh index eb368be4f..76f3c89a9 100755 --- a/src/pdm/install.sh +++ b/src/pdm/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='pdm' --option version="$VERSION" - + echo 'Done!' diff --git a/src/pdm/library_scripts.sh b/src/pdm/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/pdm/library_scripts.sh +++ b/src/pdm/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/peco-asdf/install.sh b/src/peco-asdf/install.sh index a1f611e55..e77f67ade 100755 --- a/src/peco-asdf/install.sh +++ b/src/peco-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='peco' --option version="$VERSION" - + echo 'Done!' diff --git a/src/peco-asdf/library_scripts.sh b/src/peco-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/peco-asdf/library_scripts.sh +++ b/src/peco-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff 
--git a/src/perl-asdf/install.sh b/src/perl-asdf/install.sh index 9efe31dd9..5b1fa2f1d 100755 --- a/src/perl-asdf/install.sh +++ b/src/perl-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='perl' --option version="$VERSION" - + echo 'Done!' diff --git a/src/perl-asdf/library_scripts.sh b/src/perl-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/perl-asdf/library_scripts.sh +++ b/src/perl-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end 
of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/pierrot-sdkman/install.sh b/src/pierrot-sdkman/install.sh index 365b0f68f..f88ba7f6c 100755 --- a/src/pierrot-sdkman/install.sh +++ b/src/pierrot-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/pierrot-sdkman/library_scripts.sh b/src/pierrot-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/pierrot-sdkman/library_scripts.sh +++ b/src/pierrot-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/pipenv/install.sh b/src/pipenv/install.sh index 64b026327..44ed374ad 100755 --- a/src/pipenv/install.sh +++ b/src/pipenv/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='pipenv' --option version="$VERSION" - + echo 'Done!' diff --git a/src/pipenv/library_scripts.sh b/src/pipenv/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/pipenv/library_scripts.sh +++ b/src/pipenv/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/pipx-package/install.sh b/src/pipx-package/install.sh index 
0c89c5710..74b92fa12 100755 --- a/src/pipx-package/install.sh +++ b/src/pipx-package/install.sh @@ -7,6 +7,7 @@ INJECTIONS=${INJECTIONS:-""} INCLUDEDEPS=${INCLUDEDEPS:-"false"} INTERPRETER=${INTERPRETER:-""} +# PEP 668 compatibility # PEP 668 compatibility export PIP_BREAK_SYSTEM_PACKAGES=1 diff --git a/src/pnpm/install.sh b/src/pnpm/install.sh index fbfbc9fd7..64e266000 100755 --- a/src/pnpm/install.sh +++ b/src/pnpm/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='pnpm' --option version="$VERSION" - + echo 'Done!' diff --git a/src/pnpm/library_scripts.sh b/src/pnpm/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/pnpm/library_scripts.sh +++ b/src/pnpm/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/podman-homebrew/install.sh b/src/podman-homebrew/install.sh index 9dedf8580..4d8b5e806 100755 --- a/src/podman-homebrew/install.sh +++ b/src/podman-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/podman-homebrew/library_scripts.sh 
b/src/podman-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/podman-homebrew/library_scripts.sh +++ b/src/podman-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/poetry/install.sh b/src/poetry/install.sh index a66cd0b7b..8e998fa90 100755 --- a/src/poetry/install.sh +++ b/src/poetry/install.sh @@ -5,8 +5,8 @@ 
set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='poetry' --option version="$VERSION" - + echo 'Done!' diff --git a/src/poetry/library_scripts.sh b/src/poetry/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/poetry/library_scripts.sh +++ b/src/poetry/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ 
ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/pomchecker-sdkman/install.sh b/src/pomchecker-sdkman/install.sh index d984d32fd..5b1e04836 100755 --- a/src/pomchecker-sdkman/install.sh +++ b/src/pomchecker-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/pomchecker-sdkman/library_scripts.sh b/src/pomchecker-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/pomchecker-sdkman/library_scripts.sh +++ b/src/pomchecker-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/poppler-utils-apt-get/README.md b/src/poppler-utils-apt-get/README.md index 4f1c75595..18ebbfcd3 100644 --- a/src/poppler-utils-apt-get/README.md +++ b/src/poppler-utils-apt-get/README.md @@ -1,18 +1,18 @@ # poppler-utils (pdftotext, pdftoppm, pdftops, pdfattach, pdfdetach, pdffonts, pdfimages, pdfinfo, pdfseparate, pdftocairo, pdftohtml, pdfunite) (via apt-get) (poppler-utils-apt-get) -poppler-utils is a collection of command-line utilities built on Poppler's library API, to manage PDF and extract contents: -pdfattach - add a new embedded file (attachment) to an existing PDF -pdfdetach - extract embedded documents from a PDF -pdffonts - lists the fonts used in a PDF -pdfimages - extract all embedded images at native resolution from a PDF -pdfinfo - list all information of a PDF -pdfseparate - extract single 
pages from a PDF -pdftocairo - convert single pages from a PDF to vector or bitmap formats using cairo -pdftohtml - convert PDF to HTML format retaining formatting -pdftoppm - convert a PDF page to a bitmap -pdftops - convert PDF to printable PS format -pdftotext - extract all text from PDF +poppler-utils is a collection of command-line utilities built on Poppler's library API, to manage PDF and extract contents: +pdfattach - add a new embedded file (attachment) to an existing PDF +pdfdetach - extract embedded documents from a PDF +pdffonts - lists the fonts used in a PDF +pdfimages - extract all embedded images at native resolution from a PDF +pdfinfo - list all information of a PDF +pdfseparate - extract single pages from a PDF +pdftocairo - convert single pages from a PDF to vector or bitmap formats using cairo +pdftohtml - convert PDF to HTML format retaining formatting +pdftoppm - convert a PDF page to a bitmap +pdftops - convert PDF to printable PS format +pdftotext - extract all text from PDF pdfunite - merges several PDFs. ## Example DevContainer Usage diff --git a/src/poppler-utils-apt-get/install.sh b/src/poppler-utils-apt-get/install.sh index 7b72cd497..cad5f851e 100755 --- a/src/poppler-utils-apt-get/install.sh +++ b/src/poppler-utils-apt-get/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/apt-get-packages:1.0.6" \ --option packages='poppler-utils' - + echo 'Done!' diff --git a/src/poppler-utils-apt-get/library_scripts.sh b/src/poppler-utils-apt-get/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/poppler-utils-apt-get/library_scripts.sh +++ b/src/poppler-utils-apt-get/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/powerbi-visuals-tools/install.sh b/src/powerbi-visuals-tools/install.sh index 6f7ac9747..76a886d36 100755 --- a/src/powerbi-visuals-tools/install.sh +++ b/src/powerbi-visuals-tools/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='powerbi-visuals-tools' --option version="$VERSION" - + echo 'Done!' diff --git a/src/powerbi-visuals-tools/library_scripts.sh b/src/powerbi-visuals-tools/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/powerbi-visuals-tools/library_scripts.sh +++ b/src/powerbi-visuals-tools/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git 
a/src/powershell/install.sh b/src/powershell/install.sh index 9df7a3775..f727955c0 100755 --- a/src/powershell/install.sh +++ b/src/powershell/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='powershell/powershell' --option binaryNames='pwsh' --option version="$VERSION" --option assetRegex='^(?!.*(fxdependent))' - + echo 'Done!' diff --git a/src/powershell/library_scripts.sh b/src/powershell/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/powershell/library_scripts.sh +++ b/src/powershell/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of 
the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/pre-commit/install.sh b/src/pre-commit/install.sh index 13db22769..25ca64358 100755 --- a/src/pre-commit/install.sh +++ b/src/pre-commit/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='pre-commit' --option version="$VERSION" - + echo 'Done!' diff --git a/src/pre-commit/library_scripts.sh b/src/pre-commit/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/pre-commit/library_scripts.sh +++ b/src/pre-commit/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/prettier/install.sh b/src/prettier/install.sh index e0216f623..23ffe2bc6 100755 --- a/src/prettier/install.sh +++ b/src/prettier/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option 
package='prettier' --option version="$VERSION" - + echo 'Done!' diff --git a/src/prettier/library_scripts.sh b/src/prettier/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/prettier/library_scripts.sh +++ b/src/prettier/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/prisma/install.sh b/src/prisma/install.sh index 64c1be175..4b46122c0 100755 --- a/src/prisma/install.sh +++ 
b/src/prisma/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='prisma' --option version="$VERSION" - + echo 'Done!' diff --git a/src/prisma/library_scripts.sh b/src/prisma/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/prisma/library_scripts.sh +++ b/src/prisma/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" 
]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/projen/install.sh b/src/projen/install.sh index 798eb37db..2f0310f43 100755 --- a/src/projen/install.sh +++ b/src/projen/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='projen' --option version="$VERSION" - + echo 'Done!' diff --git a/src/projen/library_scripts.sh b/src/projen/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/projen/library_scripts.sh +++ b/src/projen/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/prometheus/install.sh b/src/prometheus/install.sh index 248ec521b..1ac3ee561 100755 --- a/src/prometheus/install.sh +++ b/src/prometheus/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ 
--option repo='prometheus/prometheus' --option binaryNames='prometheus' --option version="$VERSION" - + echo 'Done!' diff --git a/src/prometheus/library_scripts.sh b/src/prometheus/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/prometheus/library_scripts.sh +++ b/src/prometheus/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/promlens/install.sh b/src/promlens/install.sh index 
7bce8663d..79ed57d8b 100755 --- a/src/promlens/install.sh +++ b/src/promlens/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='prometheus/promlens' --option binaryNames='promlens' --option version="$VERSION" - + echo 'Done!' diff --git a/src/promlens/library_scripts.sh b/src/promlens/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/promlens/library_scripts.sh +++ b/src/promlens/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ 
-z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/protoc-asdf/install.sh b/src/protoc-asdf/install.sh index f6437c65e..27485e14a 100755 --- a/src/protoc-asdf/install.sh +++ b/src/protoc-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='protoc' --option version="$VERSION" - + echo 'Done!' diff --git a/src/protoc-asdf/library_scripts.sh b/src/protoc-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/protoc-asdf/library_scripts.sh +++ b/src/protoc-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/protoc/install.sh b/src/protoc/install.sh index 360b415cb..4c4f3a187 100755 --- a/src/protoc/install.sh +++ b/src/protoc/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='protocolbuffers/protobuf' --option binaryNames='protoc' --option version="$VERSION" - + echo 'Done!' diff --git a/src/protoc/library_scripts.sh b/src/protoc/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/protoc/library_scripts.sh +++ b/src/protoc/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/pushgateway/install.sh 
b/src/pushgateway/install.sh index 9230569a6..9e6e8c8e9 100755 --- a/src/pushgateway/install.sh +++ b/src/pushgateway/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='prometheus/pushgateway' --option binaryNames='pushgateway' --option version="$VERSION" - + echo 'Done!' diff --git a/src/pushgateway/library_scripts.sh b/src/pushgateway/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/pushgateway/library_scripts.sh +++ b/src/pushgateway/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, 
download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/pyinfra/install.sh b/src/pyinfra/install.sh index c12dc7c51..ad5cd4464 100755 --- a/src/pyinfra/install.sh +++ b/src/pyinfra/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='pyinfra' --option version="$VERSION" - + echo 'Done!' diff --git a/src/pyinfra/library_scripts.sh b/src/pyinfra/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/pyinfra/library_scripts.sh +++ b/src/pyinfra/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/pylint/install.sh b/src/pylint/install.sh index 03eb318df..cf2ffd5d6 100755 --- a/src/pylint/install.sh +++ b/src/pylint/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option 
package='pylint' --option version="$VERSION" - + echo 'Done!' diff --git a/src/pylint/library_scripts.sh b/src/pylint/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/pylint/library_scripts.sh +++ b/src/pylint/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/pyoxidizer/install.sh b/src/pyoxidizer/install.sh index ee74f4da7..a22b1a76b 100755 --- a/src/pyoxidizer/install.sh +++ 
b/src/pyoxidizer/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='pyoxidizer' --option version="$VERSION" - + echo 'Done!' diff --git a/src/pyoxidizer/library_scripts.sh b/src/pyoxidizer/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/pyoxidizer/library_scripts.sh +++ b/src/pyoxidizer/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname 
-sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/pyscaffold/install.sh b/src/pyscaffold/install.sh index 398772c9c..5d79883b1 100755 --- a/src/pyscaffold/install.sh +++ b/src/pyscaffold/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='pyscaffold[all]' --option version="$VERSION" - + echo 'Done!' diff --git a/src/pyscaffold/library_scripts.sh b/src/pyscaffold/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/pyscaffold/library_scripts.sh +++ b/src/pyscaffold/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/qrcode/install.sh b/src/qrcode/install.sh index f6e89e885..94e9f2d12 100755 --- a/src/qrcode/install.sh +++ b/src/qrcode/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option 
package='qrcode[pil]' --option version="$VERSION" - + echo 'Done!' diff --git a/src/qrcode/library_scripts.sh b/src/qrcode/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/qrcode/library_scripts.sh +++ b/src/qrcode/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/quarkus-sdkman/install.sh b/src/quarkus-sdkman/install.sh index 9d8a2c2ad..cc9f1530b 100755 --- a/src/quarkus-sdkman/install.sh +++ 
b/src/quarkus-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/quarkus-sdkman/library_scripts.sh b/src/quarkus-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/quarkus-sdkman/library_scripts.sh +++ b/src/quarkus-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap 
clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/quasar-cli/install.sh b/src/quasar-cli/install.sh index 7c918a79a..1bdd5709e 100755 --- a/src/quasar-cli/install.sh +++ b/src/quasar-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='@quasar/cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/quasar-cli/library_scripts.sh b/src/quasar-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/quasar-cli/library_scripts.sh +++ b/src/quasar-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/raku-asdf/install.sh b/src/raku-asdf/install.sh index 745f1fb06..ac6c4382c 100755 --- a/src/raku-asdf/install.sh +++ b/src/raku-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/apt-get-packages:1.0.4" \ --option packages='ca-certificates' - + $nanolayer_location \ @@ -25,7 +25,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='raku' --option version="$VERSION" - + echo 'Done!' diff --git a/src/raku-asdf/library_scripts.sh b/src/raku-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/raku-asdf/library_scripts.sh +++ b/src/raku-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv 
$tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/rclone/install.sh b/src/rclone/install.sh index f259d6a9b..83cba8ed9 100755 --- a/src/rclone/install.sh +++ b/src/rclone/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='rclone/rclone' --option binaryNames='rclone' --option version="$VERSION" - + echo 'Done!' diff --git a/src/rclone/library_scripts.sh b/src/rclone/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/rclone/library_scripts.sh +++ b/src/rclone/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/redis-homebrew/install.sh b/src/redis-homebrew/install.sh index 7cdadd20e..8723ec591 100755 --- a/src/redis-homebrew/install.sh +++ b/src/redis-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/redis-homebrew/library_scripts.sh b/src/redis-homebrew/library_scripts.sh 
index 8f9bd9a0e..5384c58d6 100644 --- a/src/redis-homebrew/library_scripts.sh +++ b/src/redis-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/rekor-cli/install.sh b/src/rekor-cli/install.sh index 30d9b9f06..115ea6923 100755 --- a/src/rekor-cli/install.sh +++ b/src/rekor-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli 
utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='sigstore/rekor' --option binaryNames='rekor-cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/rekor-cli/library_scripts.sh b/src/rekor-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/rekor-cli/library_scripts.sh +++ b/src/rekor-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 
+141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/renovate-cli/install.sh b/src/renovate-cli/install.sh index 696aeb687..35f0baac0 100755 --- a/src/renovate-cli/install.sh +++ b/src/renovate-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='renovate' --option version="$VERSION" - + echo 'Done!' diff --git a/src/renovate-cli/library_scripts.sh b/src/renovate-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/renovate-cli/library_scripts.sh +++ b/src/renovate-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ripgrep/install.sh b/src/ripgrep/install.sh index d722c44b9..4d6b35178 100755 --- a/src/ripgrep/install.sh +++ b/src/ripgrep/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option 
repo='BurntSushi/ripgrep' --option binaryNames='rg' --option version="$VERSION" - + echo 'Done!' diff --git a/src/ripgrep/library_scripts.sh b/src/ripgrep/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/ripgrep/library_scripts.sh +++ b/src/ripgrep/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/rollup/install.sh b/src/rollup/install.sh index e03daa749..5662d5adf 100755 --- 
a/src/rollup/install.sh +++ b/src/rollup/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='rollup' --option version="$VERSION" - + echo 'Done!' diff --git a/src/rollup/library_scripts.sh b/src/rollup/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/rollup/library_scripts.sh +++ b/src/rollup/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ 
"$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ruby-asdf/install.sh b/src/ruby-asdf/install.sh index 051c213f0..313854575 100755 --- a/src/ruby-asdf/install.sh +++ b/src/ruby-asdf/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/apt-get-packages:1.0.6" \ --option packages='curl,ca-certificates,software-properties-common,build-essential,gnupg2,libreadline-dev,procps,dirmngr,gawk,autoconf,automake,bison,libffi-dev,libgdbm-dev,libncurses5-dev,libsqlite3-dev,libtool,libyaml-dev,pkg-config,sqlite3,zlib1g-dev,libgmp-dev,libssl-dev' - + $nanolayer_location \ @@ -24,7 +24,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.8" \ --option plugin='ruby' --option version="$VERSION" - + echo 'Done!' diff --git a/src/ruby-asdf/library_scripts.sh b/src/ruby-asdf/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/ruby-asdf/library_scripts.sh +++ b/src/ruby-asdf/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ruff/install.sh b/src/ruff/install.sh index 1f3d07436..52e445ded 100755 --- a/src/ruff/install.sh +++ b/src/ruff/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='ruff' --option 
version="$VERSION" - + echo 'Done!' diff --git a/src/ruff/library_scripts.sh b/src/ruff/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/ruff/library_scripts.sh +++ b/src/ruff/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/salesforce-cli/install.sh b/src/salesforce-cli/install.sh index 65bbd9501..372ff815a 100755 --- a/src/salesforce-cli/install.sh +++ b/src/salesforce-cli/install.sh @@ -5,8 
+5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='@salesforce/cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/salesforce-cli/library_scripts.sh b/src/salesforce-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/salesforce-cli/library_scripts.sh +++ b/src/salesforce-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux 
aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/salesforce-sfdx/install.sh b/src/salesforce-sfdx/install.sh index 18b2318b4..a95b4dfb8 100755 --- a/src/salesforce-sfdx/install.sh +++ b/src/salesforce-sfdx/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='sfdx-cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/salesforce-sfdx/library_scripts.sh b/src/salesforce-sfdx/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/salesforce-sfdx/library_scripts.sh +++ b/src/salesforce-sfdx/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/sanity-cli/install.sh b/src/sanity-cli/install.sh index b62d014d8..fb57bb16a 100755 --- a/src/sanity-cli/install.sh +++ b/src/sanity-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ 
--option package='@sanity/cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/sanity-cli/library_scripts.sh b/src/sanity-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/sanity-cli/library_scripts.sh +++ b/src/sanity-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/sap-piper/install.sh b/src/sap-piper/install.sh index 07a47f4ea..9c10a1625 100755 --- 
a/src/sap-piper/install.sh +++ b/src/sap-piper/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='SAP/jenkins-library' --option binaryNames='piper' --option version="$VERSION" --option assetRegex='^(?!.*(master))' - + echo 'Done!' diff --git a/src/sap-piper/library_scripts.sh b/src/sap-piper/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/sap-piper/library_scripts.sh +++ b/src/sap-piper/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the 
script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/sbt-sdkman/install.sh b/src/sbt-sdkman/install.sh index 55ccb69fb..7034bc37c 100755 --- a/src/sbt-sdkman/install.sh +++ b/src/sbt-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/sbt-sdkman/library_scripts.sh b/src/sbt-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/sbt-sdkman/library_scripts.sh +++ b/src/sbt-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/scala-sdkman/install.sh b/src/scala-sdkman/install.sh index 478e66e9f..b2601007d 100755 --- a/src/scala-sdkman/install.sh +++ b/src/scala-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git 
a/src/scala-sdkman/library_scripts.sh b/src/scala-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/scala-sdkman/library_scripts.sh +++ b/src/scala-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/scalacli-sdkman/install.sh b/src/scalacli-sdkman/install.sh index 5d7a94a77..cdb300b7b 100755 --- 
a/src/scalacli-sdkman/install.sh +++ b/src/scalacli-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/scalacli-sdkman/library_scripts.sh b/src/scalacli-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/scalacli-sdkman/library_scripts.sh +++ b/src/scalacli-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 
+148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/scancode-toolkit/install.sh b/src/scancode-toolkit/install.sh index e3e3f163d..4758ee8a6 100755 --- a/src/scancode-toolkit/install.sh +++ b/src/scancode-toolkit/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='scancode-toolkit' --option version="$VERSION" - + echo 'Done' diff --git a/src/scancode-toolkit/library_scripts.sh b/src/scancode-toolkit/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/scancode-toolkit/library_scripts.sh +++ b/src/scancode-toolkit/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/schemacrawler-sdkman/install.sh b/src/schemacrawler-sdkman/install.sh index 940f7bf82..2a8b59d83 100755 --- a/src/schemacrawler-sdkman/install.sh +++ b/src/schemacrawler-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/schemacrawler-sdkman/library_scripts.sh 
b/src/schemacrawler-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/schemacrawler-sdkman/library_scripts.sh +++ b/src/schemacrawler-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/sentinel-asdf/install.sh b/src/sentinel-asdf/install.sh index c7a0e7596..fc9eaf8a7 100755 --- a/src/sentinel-asdf/install.sh +++ 
b/src/sentinel-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='sentinel' --option version="$VERSION" - + echo 'Done!' diff --git a/src/sentinel-asdf/library_scripts.sh b/src/sentinel-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/sentinel-asdf/library_scripts.sh +++ b/src/sentinel-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of 
the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/serf-asdf/install.sh b/src/serf-asdf/install.sh index 930f51471..94e7c0af1 100755 --- a/src/serf-asdf/install.sh +++ b/src/serf-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='serf' --option version="$VERSION" - + echo 'Done!' diff --git a/src/serf-asdf/library_scripts.sh b/src/serf-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/serf-asdf/library_scripts.sh +++ b/src/serf-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/shfmt/install.sh b/src/shfmt/install.sh index 3ce0bf54f..ae0aaa42f 100755 --- a/src/shfmt/install.sh +++ b/src/shfmt/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature 
\ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.18" \ --option repo='mvdan/sh' --option binaryNames='shfmt' --option version="$VERSION" - + echo 'Done!' diff --git a/src/shfmt/library_scripts.sh b/src/shfmt/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/shfmt/library_scripts.sh +++ b/src/shfmt/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: 
$(uname -m)" diff --git a/src/shopify-cli/install.sh b/src/shopify-cli/install.sh index 9451d8d76..c089b3e15 100755 --- a/src/shopify-cli/install.sh +++ b/src/shopify-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='@shopify/cli' --option version="$VERSION" - + $nanolayer_location \ @@ -24,7 +24,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='@shopify/theme' --option version="$VERSION" - + echo 'Done!' diff --git a/src/shopify-cli/library_scripts.sh b/src/shopify-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/shopify-cli/library_scripts.sh +++ b/src/shopify-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/sigstore-python/install.sh b/src/sigstore-python/install.sh index bbd2e1917..75994b557 100755 --- a/src/sigstore-python/install.sh +++ b/src/sigstore-python/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='sigstore' --option version="$VERSION" - + echo 'Done' diff --git a/src/sigstore-python/library_scripts.sh b/src/sigstore-python/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/sigstore-python/library_scripts.sh +++ b/src/sigstore-python/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/snyk-cli/install.sh 
b/src/snyk-cli/install.sh index 8253b62bc..cbc3ab03e 100755 --- a/src/snyk-cli/install.sh +++ b/src/snyk-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='snyk@latest' --option version="$VERSION" - + echo 'Done!' diff --git a/src/snyk-cli/library_scripts.sh b/src/snyk-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/snyk-cli/library_scripts.sh +++ b/src/snyk-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z 
"${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/sops/install.sh b/src/sops/install.sh index cefbca13c..fdec2d5f1 100755 --- a/src/sops/install.sh +++ b/src/sops/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='mozilla/sops' --option binaryNames='sops' --option version="$VERSION" - + echo 'Done!' diff --git a/src/sops/library_scripts.sh b/src/sops/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/sops/library_scripts.sh +++ b/src/sops/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/spacectl/install.sh b/src/spacectl/install.sh index c2938b540..783578f04 100755 --- a/src/spacectl/install.sh +++ b/src/spacectl/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option 
repo='spacelift-io/spacectl' --option binaryNames='spacectl' --option version="$VERSION" - + echo 'Done!' diff --git a/src/spacectl/library_scripts.sh b/src/spacectl/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/spacectl/library_scripts.sh +++ b/src/spacectl/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/spark-sdkman/install.sh b/src/spark-sdkman/install.sh index d7e420ab7..24df61a7e 100755 
--- a/src/spark-sdkman/install.sh +++ b/src/spark-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/spark-sdkman/library_scripts.sh b/src/spark-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/spark-sdkman/library_scripts.sh +++ b/src/spark-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ 
ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/spicedb/install.sh b/src/spicedb/install.sh index 95228b850..3b546d558 100755 --- a/src/spicedb/install.sh +++ b/src/spicedb/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='authzed/spicedb' --option binaryNames='spicedb' --option version="$VERSION" - + echo 'Done!' diff --git a/src/spicedb/library_scripts.sh b/src/spicedb/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/spicedb/library_scripts.sh +++ b/src/spicedb/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/springboot-sdkman/install.sh b/src/springboot-sdkman/install.sh index 71aba9917..20bb756b2 100755 --- a/src/springboot-sdkman/install.sh +++ b/src/springboot-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/springboot-sdkman/library_scripts.sh 
b/src/springboot-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/springboot-sdkman/library_scripts.sh +++ b/src/springboot-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/sqlfluff/install.sh b/src/sqlfluff/install.sh index fc1d1d58d..7168d2b8b 100755 --- a/src/sqlfluff/install.sh +++ b/src/sqlfluff/install.sh @@ 
-5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='sqlfluff' --option injections="$PLUGINS" --option version="$VERSION" - + echo 'Done!' diff --git a/src/sqlfluff/library_scripts.sh b/src/sqlfluff/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/sqlfluff/library_scripts.sh +++ b/src/sqlfluff/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = 
"Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/squarespace-server/install.sh b/src/squarespace-server/install.sh index 6f3acd644..d72972e4c 100755 --- a/src/squarespace-server/install.sh +++ b/src/squarespace-server/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='@squarespace/server' --option version="$VERSION" - + echo 'Done!' diff --git a/src/squarespace-server/library_scripts.sh b/src/squarespace-server/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/squarespace-server/library_scripts.sh +++ b/src/squarespace-server/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/sshoogr-sdkman/install.sh b/src/sshoogr-sdkman/install.sh index 36e8c30fb..6c1cde902 100755 --- a/src/sshoogr-sdkman/install.sh +++ b/src/sshoogr-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/sshoogr-sdkman/library_scripts.sh b/src/sshoogr-sdkman/library_scripts.sh 
index 8f9bd9a0e..5384c58d6 100644 --- a/src/sshoogr-sdkman/library_scripts.sh +++ b/src/sshoogr-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/starship-homebrew/install.sh b/src/starship-homebrew/install.sh index e5ae48b65..cdca761ec 100755 --- a/src/starship-homebrew/install.sh +++ b/src/starship-homebrew/install.sh @@ -6,8 +6,8 @@ 
source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/starship-homebrew/library_scripts.sh b/src/starship-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/starship-homebrew/library_scripts.sh +++ b/src/starship-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; 
then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/starship/install.sh b/src/starship/install.sh index 8bb88de6f..cc82262f1 100755 --- a/src/starship/install.sh +++ b/src/starship/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='starship/starship' --option binaryNames='starship' --option version="$VERSION" - + echo 'Done!' diff --git a/src/starship/library_scripts.sh b/src/starship/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/starship/library_scripts.sh +++ b/src/starship/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/statsd-exporter/install.sh b/src/statsd-exporter/install.sh index cc3f81d10..dd4499f46 100755 --- a/src/statsd-exporter/install.sh +++ b/src/statsd-exporter/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='prometheus/statsd_exporter' --option binaryNames='statsd_exporter' --option version="$VERSION" - + echo 'Done!' diff --git a/src/statsd-exporter/library_scripts.sh b/src/statsd-exporter/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/statsd-exporter/library_scripts.sh +++ b/src/statsd-exporter/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname 
-m)" diff --git a/src/stew/install.sh b/src/stew/install.sh index 300e23414..13b56167d 100755 --- a/src/stew/install.sh +++ b/src/stew/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='marwanhawari/stew' --option binaryNames='stew' --option version="$VERSION" - + echo 'Done!' diff --git a/src/stew/library_scripts.sh b/src/stew/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/stew/library_scripts.sh +++ b/src/stew/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and 
delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/supabase-cli/install.sh b/src/supabase-cli/install.sh index 636e7402e..445df9835 100755 --- a/src/supabase-cli/install.sh +++ b/src/supabase-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='supabase/cli' --option binaryNames='supabase' --option version="$VERSION" - + echo 'Done!' diff --git a/src/supabase-cli/library_scripts.sh b/src/supabase-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/supabase-cli/library_scripts.sh +++ b/src/supabase-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/surge-cli/install.sh b/src/surge-cli/install.sh index 0f885a9b9..31729f919 100755 --- a/src/surge-cli/install.sh +++ b/src/surge-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option 
package='surge' --option version="$VERSION" - + echo 'Done!' diff --git a/src/surge-cli/library_scripts.sh b/src/surge-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/surge-cli/library_scripts.sh +++ b/src/surge-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/sv2v/install.sh b/src/sv2v/install.sh index ac755cb09..5279a569b 100755 --- a/src/sv2v/install.sh +++ b/src/sv2v/install.sh @@ 
-5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='zachjs/sv2v' --option binaryNames='sv2v' --option version="$VERSION" - + echo 'Done!' diff --git a/src/sv2v/library_scripts.sh b/src/sv2v/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/sv2v/library_scripts.sh +++ b/src/sv2v/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; 
then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/svu-asdf/install.sh b/src/svu-asdf/install.sh index d479c92d9..eb5e5cb08 100755 --- a/src/svu-asdf/install.sh +++ b/src/svu-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='svu' --option version="$VERSION" - + echo 'Done!' diff --git a/src/svu-asdf/library_scripts.sh b/src/svu-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/svu-asdf/library_scripts.sh +++ b/src/svu-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/syft/install.sh b/src/syft/install.sh index c35b1b0f6..75fdf36fc 100755 --- a/src/syft/install.sh +++ b/src/syft/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='anchore/syft' --option binaryNames='syft' --option version="$VERSION" - + echo 'Done!' diff --git a/src/syft/library_scripts.sh b/src/syft/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/syft/library_scripts.sh +++ b/src/syft/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/syntaqx-serve/install.sh 
b/src/syntaqx-serve/install.sh index 872dfa1a6..66a820b1f 100755 --- a/src/syntaqx-serve/install.sh +++ b/src/syntaqx-serve/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='syntaqx/serve' --option binaryNames='serve' --option version="$VERSION" - + echo 'Done!' diff --git a/src/syntaqx-serve/library_scripts.sh b/src/syntaqx-serve/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/syntaqx-serve/library_scripts.sh +++ b/src/syntaqx-serve/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, 
download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/tailscale/install.sh b/src/tailscale/install.sh index ebe8aaa0a..c818806f4 100755 --- a/src/tailscale/install.sh +++ b/src/tailscale/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" @@ -15,8 +15,8 @@ ensure_nanolayer nanolayer_location "v0.4.39" $nanolayer_location \ install \ devcontainer-feature \ - "ghcr.io/devcontainers-contrib/features/curl-apt-get:1.0.16" - + "ghcr.io/devcontainers-contrib/features/curl-apt-get:1.0.16" + curl -fsSL https://tailscale.com/install.sh | sh diff --git a/src/tailscale/library_scripts.sh b/src/tailscale/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/tailscale/library_scripts.sh +++ b/src/tailscale/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/taxi-sdkman/install.sh b/src/taxi-sdkman/install.sh index bfc9d2cc6..987a96f04 100755 --- a/src/taxi-sdkman/install.sh +++ b/src/taxi-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git 
a/src/taxi-sdkman/library_scripts.sh b/src/taxi-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/taxi-sdkman/library_scripts.sh +++ b/src/taxi-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/tea/install.sh b/src/tea/install.sh index 05def09c3..68ccbfe04 100755 --- a/src/tea/install.sh +++ b/src/tea/install.sh @@ 
-5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='teaxyz/cli' --option binaryNames='tea' --option version="$VERSION" - + echo 'Done!' diff --git a/src/tea/library_scripts.sh b/src/tea/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/tea/library_scripts.sh +++ b/src/tea/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ 
-141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/tekton-cli/install.sh b/src/tekton-cli/install.sh index 6c1b6c241..acd01d97d 100755 --- a/src/tekton-cli/install.sh +++ b/src/tekton-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='tektoncd/cli' --option binaryNames='tkn' --option version="$VERSION" - + echo 'Done!' diff --git a/src/tekton-cli/library_scripts.sh b/src/tekton-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/tekton-cli/library_scripts.sh +++ b/src/tekton-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/tempo/install.sh b/src/tempo/install.sh index df83102f3..155c73fe3 100755 --- a/src/tempo/install.sh +++ b/src/tempo/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option 
repo='grafana/tempo' --option binaryNames='tempo,tempo-cli,tempo-query' --option version="$VERSION" --option libName='tempo' - + echo 'Done!' diff --git a/src/tempo/library_scripts.sh b/src/tempo/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/tempo/library_scripts.sh +++ b/src/tempo/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/temporal-cli/install.sh b/src/temporal-cli/install.sh index 
908736d65..26d95e84e 100755 --- a/src/temporal-cli/install.sh +++ b/src/temporal-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='temporalio/cli' --option binaryNames='temporal' --option version="$VERSION" - + echo 'Done!' diff --git a/src/temporal-cli/library_scripts.sh b/src/temporal-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/temporal-cli/library_scripts.sh +++ b/src/temporal-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end 
of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/terracognita/install.sh b/src/terracognita/install.sh index d9591fe6a..0339de64d 100755 --- a/src/terracognita/install.sh +++ b/src/terracognita/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='cycloidio/terracognita' --option binaryNames='terracognita' --option version="$VERSION" - + echo 'Done!' diff --git a/src/terracognita/library_scripts.sh b/src/terracognita/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/terracognita/library_scripts.sh +++ b/src/terracognita/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/terraform-asdf/install.sh b/src/terraform-asdf/install.sh index d7284da2a..eabbca721 100755 --- a/src/terraform-asdf/install.sh +++ b/src/terraform-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='terraform' --option version="$VERSION" - + echo 'Done!' diff --git a/src/terraform-asdf/library_scripts.sh b/src/terraform-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/terraform-asdf/library_scripts.sh +++ b/src/terraform-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures 
yet: $(uname -m)" diff --git a/src/terraform-docs/install.sh b/src/terraform-docs/install.sh index 8e1541113..08611b199 100755 --- a/src/terraform-docs/install.sh +++ b/src/terraform-docs/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='terraform-docs/terraform-docs' --option binaryNames='terraform-docs' --option version="$VERSION" - + echo 'Done!' diff --git a/src/terraform-docs/library_scripts.sh b/src/terraform-docs/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/terraform-docs/library_scripts.sh +++ b/src/terraform-docs/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/terraform-ls-asdf/install.sh b/src/terraform-ls-asdf/install.sh index 660a8c31f..db7e17afd 100755 --- a/src/terraform-ls-asdf/install.sh +++ b/src/terraform-ls-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='terraform-ls' --option version="$VERSION" - + echo 'Done!' diff --git a/src/terraform-ls-asdf/library_scripts.sh b/src/terraform-ls-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/terraform-ls-asdf/library_scripts.sh +++ b/src/terraform-ls-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux 
architectures yet: $(uname -m)" diff --git a/src/terraformer/install.sh b/src/terraformer/install.sh index 332bdded4..0414731e6 100755 --- a/src/terraformer/install.sh +++ b/src/terraformer/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='GoogleCloudPlatform/terraformer' --option binaryNames='terraformer' --option version="$VERSION" --option assetRegex='.*(all).*' - + echo 'Done!' diff --git a/src/terraformer/library_scripts.sh b/src/terraformer/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/terraformer/library_scripts.sh +++ b/src/terraformer/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/terragrunt/install.sh b/src/terragrunt/install.sh index d06513421..2a80b3e81 100755 --- a/src/terragrunt/install.sh +++ b/src/terragrunt/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ 
--option repo='gruntwork-io/terragrunt' --option binaryNames='terragrunt' --option version="$VERSION" - + echo 'Done!' diff --git a/src/terragrunt/library_scripts.sh b/src/terragrunt/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/terragrunt/library_scripts.sh +++ b/src/terragrunt/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/terramate/install.sh b/src/terramate/install.sh index 
20ba60f4d..cfa6cb541 100755 --- a/src/terramate/install.sh +++ b/src/terramate/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='mineiros-io/terramate' --option binaryNames='terramate' --option version="$VERSION" - + echo 'Done!' diff --git a/src/terramate/library_scripts.sh b/src/terramate/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/terramate/library_scripts.sh +++ b/src/terramate/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the 
script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/tfc-agent-asdf/install.sh b/src/tfc-agent-asdf/install.sh index 9ac96203d..393a6fc8f 100755 --- a/src/tfc-agent-asdf/install.sh +++ b/src/tfc-agent-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='tfc-agent' --option version="$VERSION" - + echo 'Done!' diff --git a/src/tfc-agent-asdf/library_scripts.sh b/src/tfc-agent-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/tfc-agent-asdf/library_scripts.sh +++ b/src/tfc-agent-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/tfcdk-cli/install.sh b/src/tfcdk-cli/install.sh index 21d71193d..f39c41955 100755 --- a/src/tfcdk-cli/install.sh +++ b/src/tfcdk-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='cdktf-cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/tfcdk-cli/library_scripts.sh b/src/tfcdk-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/tfcdk-cli/library_scripts.sh +++ b/src/tfcdk-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/tfenv-homebrew/install.sh 
b/src/tfenv-homebrew/install.sh index a30432ca3..ef0e68e04 100755 --- a/src/tfenv-homebrew/install.sh +++ b/src/tfenv-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/tfenv-homebrew/library_scripts.sh b/src/tfenv-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/tfenv-homebrew/library_scripts.sh +++ b/src/tfenv-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux 
x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/tfsec/install.sh b/src/tfsec/install.sh index 1e4162239..38dce88e7 100755 --- a/src/tfsec/install.sh +++ b/src/tfsec/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='aquasecurity/tfsec' --option binaryNames='tfsec' --option version="$VERSION" --option assetRegex='^(?!.*(checkgen|tar))' - + echo 'Done!' diff --git a/src/tfsec/library_scripts.sh b/src/tfsec/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/tfsec/library_scripts.sh +++ b/src/tfsec/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/tfswitch/install.sh b/src/tfswitch/install.sh index a8222e262..f636cd50d 100755 --- a/src/tfswitch/install.sh +++ b/src/tfswitch/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" @@ -16,7 +16,7 @@ $nanolayer_location \ install \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/curl-apt-get:1.0.7" \ - + curl -L https://raw.githubusercontent.com/warrensbox/terraform-switcher/release/install.sh | bash diff --git a/src/tfswitch/library_scripts.sh b/src/tfswitch/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/tfswitch/library_scripts.sh +++ b/src/tfswitch/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux 
architectures yet: $(uname -m)" diff --git a/src/tldr/install.sh b/src/tldr/install.sh index 39ad2d5d4..12d6cb470 100755 --- a/src/tldr/install.sh +++ b/src/tldr/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='tldr' --option version="$VERSION" - + echo 'Done!' diff --git a/src/tldr/library_scripts.sh b/src/tldr/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/tldr/library_scripts.sh +++ b/src/tldr/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at 
the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/tmate/install.sh b/src/tmate/install.sh index 85b7776d8..1406954d1 100755 --- a/src/tmate/install.sh +++ b/src/tmate/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/tmate/library_scripts.sh b/src/tmate/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/tmate/library_scripts.sh +++ b/src/tmate/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/tmux-apt-get/install.sh b/src/tmux-apt-get/install.sh index 212f949a7..6d889b7c3 100755 --- a/src/tmux-apt-get/install.sh +++ b/src/tmux-apt-get/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git 
a/src/tmux-apt-get/library_scripts.sh b/src/tmux-apt-get/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/tmux-apt-get/library_scripts.sh +++ b/src/tmux-apt-get/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/tmux-homebrew/install.sh b/src/tmux-homebrew/install.sh index 4bd2e06c0..a67ed6fc9 100755 --- 
a/src/tmux-homebrew/install.sh +++ b/src/tmux-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/tmux-homebrew/library_scripts.sh b/src/tmux-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/tmux-homebrew/library_scripts.sh +++ b/src/tmux-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ 
ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/tomcat-sdkman/install.sh b/src/tomcat-sdkman/install.sh index 960283a41..91a1818f3 100755 --- a/src/tomcat-sdkman/install.sh +++ b/src/tomcat-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/tomcat-sdkman/library_scripts.sh b/src/tomcat-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/tomcat-sdkman/library_scripts.sh +++ b/src/tomcat-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/tooljet-cli/install.sh b/src/tooljet-cli/install.sh index 46c85e47d..c20370ded 100755 --- a/src/tooljet-cli/install.sh +++ b/src/tooljet-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ 
devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='@tooljet/cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/tooljet-cli/library_scripts.sh b/src/tooljet-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/tooljet-cli/library_scripts.sh +++ b/src/tooljet-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git 
a/src/toolkit-sdkman/install.sh b/src/toolkit-sdkman/install.sh index 5bcfdf933..d75364d21 100755 --- a/src/toolkit-sdkman/install.sh +++ b/src/toolkit-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/toolkit-sdkman/library_scripts.sh b/src/toolkit-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/toolkit-sdkman/library_scripts.sh +++ b/src/toolkit-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if 
[ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/tox/install.sh b/src/tox/install.sh index c34100f09..3a5405282 100755 --- a/src/tox/install.sh +++ b/src/tox/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='tox' --option version="$VERSION" - + echo 'Done!' diff --git a/src/tox/library_scripts.sh b/src/tox/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/tox/library_scripts.sh +++ b/src/tox/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/trello-cli/install.sh b/src/trello-cli/install.sh index 1e8ce6bc7..b1c356606 100755 --- a/src/trello-cli/install.sh +++ b/src/trello-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ 
--option package='trello-cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/trello-cli/library_scripts.sh b/src/trello-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/trello-cli/library_scripts.sh +++ b/src/trello-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/tridentctl-asdf/install.sh b/src/tridentctl-asdf/install.sh index 53c98fa04..27e6253ee 100755 --- 
a/src/tridentctl-asdf/install.sh +++ b/src/tridentctl-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='tridentctl' --option version="$VERSION" - + echo 'Done!' diff --git a/src/tridentctl-asdf/library_scripts.sh b/src/tridentctl-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/tridentctl-asdf/library_scripts.sh +++ b/src/tridentctl-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, 
download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/truffle/install.sh b/src/truffle/install.sh index 037971136..992e53ec2 100755 --- a/src/truffle/install.sh +++ b/src/truffle/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='truffle' --option version="$VERSION" - + echo 'Done!' diff --git a/src/truffle/library_scripts.sh b/src/truffle/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/truffle/library_scripts.sh +++ b/src/truffle/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ts-node/README.md b/src/ts-node/README.md index 805de805c..7ed57fce1 100644 --- a/src/ts-node/README.md +++ b/src/ts-node/README.md @@ -1,7 +1,7 @@ # ts-node (via npm) (ts-node) -ts-node is a TypeScript execution engine and REPL for Node.js. It JIT transforms TypeScript into JavaScript, enabling you to directly execute TypeScript on Node.js without precompiling. +ts-node is a TypeScript execution engine and REPL for Node.js. It JIT transforms TypeScript into JavaScript, enabling you to directly execute TypeScript on Node.js without precompiling. 
## Example DevContainer Usage diff --git a/src/ts-node/install.sh b/src/ts-node/install.sh index eeea8eb2a..1173282f1 100755 --- a/src/ts-node/install.sh +++ b/src/ts-node/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='ts-node' --option version="$VERSION" - + echo 'Done!' diff --git a/src/ts-node/library_scripts.sh b/src/ts-node/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/ts-node/library_scripts.sh +++ b/src/ts-node/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it 
temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/tsx/install.sh b/src/tsx/install.sh index 5b644dd34..c2ccb2af4 100755 --- a/src/tsx/install.sh +++ b/src/tsx/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='tsx' --option version="$VERSION" - + echo 'Done!' diff --git a/src/tsx/library_scripts.sh b/src/tsx/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/tsx/library_scripts.sh +++ b/src/tsx/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/turborepo-npm/install.sh b/src/turborepo-npm/install.sh index b2ca986ca..6baf72d22 100755 --- a/src/turborepo-npm/install.sh +++ b/src/turborepo-npm/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='turbo' --option version="$VERSION" - + echo 'Done!' diff --git a/src/turborepo-npm/library_scripts.sh b/src/turborepo-npm/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/turborepo-npm/library_scripts.sh +++ b/src/turborepo-npm/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/twine/install.sh b/src/twine/install.sh 
index 5bbaf0e76..f5332f789 100755 --- a/src/twine/install.sh +++ b/src/twine/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='twine' --option version="$VERSION" - + echo 'Done!' diff --git a/src/twine/library_scripts.sh b/src/twine/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/twine/library_scripts.sh +++ b/src/twine/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" 
= 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/typescript/install.sh b/src/typescript/install.sh index 29f2a69e2..bb865107e 100755 --- a/src/typescript/install.sh +++ b/src/typescript/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='typescript' --option version="$VERSION" - + echo 'Done!' diff --git a/src/typescript/library_scripts.sh b/src/typescript/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/typescript/library_scripts.sh +++ b/src/typescript/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/typst/install.sh b/src/typst/install.sh index d9b892f02..3227142a0 100755 --- a/src/typst/install.sh +++ b/src/typst/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='typst/typst' 
--option binaryNames='typst' --option version="$VERSION" - + echo 'Done!' diff --git a/src/typst/library_scripts.sh b/src/typst/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/typst/library_scripts.sh +++ b/src/typst/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/ufmt/library_scripts.sh b/src/ufmt/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/ufmt/library_scripts.sh +++ 
b/src/ufmt/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/upx/install.sh b/src/upx/install.sh index 86010ae2f..12b752d93 100755 --- a/src/upx/install.sh +++ b/src/upx/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: 
https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='upx/upx' --option binaryNames='upx' --option version="$VERSION" - + echo 'Done!' diff --git a/src/upx/library_scripts.sh b/src/upx/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/upx/library_scripts.sh +++ b/src/upx/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ 
-152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/vault-asdf/install.sh b/src/vault-asdf/install.sh index 7a4c9e895..2371d3b68 100755 --- a/src/vault-asdf/install.sh +++ b/src/vault-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='vault' --option version="$VERSION" - + echo 'Done!' diff --git a/src/vault-asdf/library_scripts.sh b/src/vault-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/vault-asdf/library_scripts.sh +++ b/src/vault-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/vercel-cli/install.sh b/src/vercel-cli/install.sh index 6ca3b9cd6..2abee735c 100755 --- a/src/vercel-cli/install.sh +++ b/src/vercel-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='vercel' --option version="$VERSION" - + echo 'Done!' diff --git a/src/vercel-cli/library_scripts.sh b/src/vercel-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/vercel-cli/library_scripts.sh +++ b/src/vercel-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/vercel-ncc/install.sh b/src/vercel-ncc/install.sh 
index b25ae4e03..791ff077b 100755 --- a/src/vercel-ncc/install.sh +++ b/src/vercel-ncc/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='@vercel/ncc' --option version="$VERSION" - + echo 'Done!' diff --git a/src/vercel-ncc/library_scripts.sh b/src/vercel-ncc/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/vercel-ncc/library_scripts.sh +++ b/src/vercel-ncc/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z 
"${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/vercel-pkg/install.sh b/src/vercel-pkg/install.sh index 3be83a273..a2a0d15cd 100755 --- a/src/vercel-pkg/install.sh +++ b/src/vercel-pkg/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='pkg' --option version="$VERSION" - + echo 'Done!' diff --git a/src/vercel-pkg/library_scripts.sh b/src/vercel-pkg/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/vercel-pkg/library_scripts.sh +++ b/src/vercel-pkg/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/vercel-release/install.sh b/src/vercel-release/install.sh index 67f5b4a87..21570dac7 100755 --- a/src/vercel-release/install.sh +++ b/src/vercel-release/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='release' --option version="$VERSION" - + echo 'Done!' diff --git a/src/vercel-release/library_scripts.sh b/src/vercel-release/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/vercel-release/library_scripts.sh +++ b/src/vercel-release/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/vercel-serve/install.sh 
b/src/vercel-serve/install.sh index 68e07af2e..1f2a4dd03 100755 --- a/src/vercel-serve/install.sh +++ b/src/vercel-serve/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='serve' --option version="$VERSION" - + echo 'Done!' diff --git a/src/vercel-serve/library_scripts.sh b/src/vercel-serve/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/vercel-serve/library_scripts.sh +++ b/src/vercel-serve/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of 
the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/vscode-cli/library_scripts.sh b/src/vscode-cli/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/vscode-cli/library_scripts.sh +++ b/src/vscode-cli/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; 
then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/vscode-server/library_scripts.sh b/src/vscode-server/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/vscode-server/library_scripts.sh +++ b/src/vscode-server/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ 
-159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/vtop/install.sh b/src/vtop/install.sh index 39584f69b..166acd2a9 100755 --- a/src/vtop/install.sh +++ b/src/vtop/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option package='vtop' --option version="$VERSION" - + echo 'Done!' diff --git a/src/vtop/library_scripts.sh b/src/vtop/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/vtop/library_scripts.sh +++ b/src/vtop/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/vue-cli/install.sh b/src/vue-cli/install.sh index d1a68ded7..17b801a61 100755 --- a/src/vue-cli/install.sh +++ b/src/vue-cli/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.5" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/npm-package:1.0.3" \ --option 
package='@vue/cli' --option version="$VERSION" - + echo 'Done!' diff --git a/src/vue-cli/library_scripts.sh b/src/vue-cli/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/vue-cli/library_scripts.sh +++ b/src/vue-cli/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/vulture/install.sh b/src/vulture/install.sh index a84afc9bd..2b10afb9d 100755 --- a/src/vulture/install.sh +++ 
b/src/vulture/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='vulture' --option version="$VERSION" - + echo 'Done!' diff --git a/src/vulture/library_scripts.sh b/src/vulture/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/vulture/library_scripts.sh +++ b/src/vulture/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux 
aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/w3m-apt-get/install.sh b/src/w3m-apt-get/install.sh index 1bc09efd9..d675684b4 100755 --- a/src/w3m-apt-get/install.sh +++ b/src/w3m-apt-get/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/w3m-apt-get/library_scripts.sh b/src/w3m-apt-get/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/w3m-apt-get/library_scripts.sh +++ b/src/w3m-apt-get/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/w3m-homebrew/install.sh b/src/w3m-homebrew/install.sh index cda6d9006..e742f85d9 100755 --- a/src/w3m-homebrew/install.sh +++ b/src/w3m-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git 
a/src/w3m-homebrew/library_scripts.sh b/src/w3m-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/w3m-homebrew/library_scripts.sh +++ b/src/w3m-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/waypoint-asdf/install.sh b/src/waypoint-asdf/install.sh index 3382cc6d1..139d95f04 100755 --- 
a/src/waypoint-asdf/install.sh +++ b/src/waypoint-asdf/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.45" @@ -17,7 +17,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/asdf-package:1.0.5" \ --option plugin='waypoint' --option version="$VERSION" - + echo 'Done!' diff --git a/src/waypoint-asdf/library_scripts.sh b/src/waypoint-asdf/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/waypoint-asdf/library_scripts.sh +++ b/src/waypoint-asdf/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it 
temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/webtau-sdkman/install.sh b/src/webtau-sdkman/install.sh index 6499b518f..21bae4c8d 100755 --- a/src/webtau-sdkman/install.sh +++ b/src/webtau-sdkman/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/webtau-sdkman/library_scripts.sh b/src/webtau-sdkman/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/webtau-sdkman/library_scripts.sh +++ b/src/webtau-sdkman/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/wget-apt-get/install.sh b/src/wget-apt-get/install.sh index 65e4bd4b0..4aa36ef56 100755 --- a/src/wget-apt-get/install.sh +++ b/src/wget-apt-get/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git 
a/src/wget-apt-get/library_scripts.sh b/src/wget-apt-get/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/wget-apt-get/library_scripts.sh +++ b/src/wget-apt-get/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/wget-homebrew/install.sh b/src/wget-homebrew/install.sh index 0d5131ff2..7c54dc12e 100755 --- 
a/src/wget-homebrew/install.sh +++ b/src/wget-homebrew/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.39" diff --git a/src/wget-homebrew/library_scripts.sh b/src/wget-homebrew/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/wget-homebrew/library_scripts.sh +++ b/src/wget-homebrew/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ 
ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/wireguard-apt-get/install.sh b/src/wireguard-apt-get/install.sh index d781eff42..895760228 100755 --- a/src/wireguard-apt-get/install.sh +++ b/src/wireguard-apt-get/install.sh @@ -6,8 +6,8 @@ source ./library_scripts.sh # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.4.29" diff --git a/src/wireguard-apt-get/library_scripts.sh b/src/wireguard-apt-get/library_scripts.sh index 8f9bd9a0e..5384c58d6 100644 --- a/src/wireguard-apt-get/library_scripts.sh +++ b/src/wireguard-apt-get/library_scripts.sh @@ -3,14 +3,14 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine - + url=$1 output_location=$2 tempdir=$(mktemp -d) @@ -19,8 +19,8 @@ clean_download() { function _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -38,8 +38,8 @@ clean_download() { function _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -48,7 +48,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -76,7 +76,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -90,7 +90,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -135,7 +135,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [[ -z "${nanolayer_location}" ]]; then if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then @@ -148,7 +148,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -159,11 +159,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/xmrig/install.sh b/src/xmrig/install.sh index c62fccd83..910148b19 100755 --- a/src/xmrig/install.sh +++ b/src/xmrig/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ 
"ghcr.io/devcontainers-contrib/features/gh-release:1.0.19" \ --option repo='xmrig/xmrig' --option binaryNames='xmrig' --option version="$VERSION" - + echo 'Done!' diff --git a/src/xmrig/library_scripts.sh b/src/xmrig/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/xmrig/library_scripts.sh +++ b/src/xmrig/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/xonsh/install.sh b/src/xonsh/install.sh 
index 732f6fe77..30d6cda91 100755 --- a/src/xonsh/install.sh +++ b/src/xonsh/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='xonsh[all]' --option version="$VERSION" - + echo 'Done!' diff --git a/src/xonsh/library_scripts.sh b/src/xonsh/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/xonsh/library_scripts.sh +++ b/src/xonsh/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname 
-sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/yamllint/install.sh b/src/yamllint/install.sh index 484aa3a67..7fdda6048 100755 --- a/src/yamllint/install.sh +++ b/src/yamllint/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='yamllint' --option version="$VERSION" - + echo 'Done!' diff --git a/src/yamllint/library_scripts.sh b/src/yamllint/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/yamllint/library_scripts.sh +++ b/src/yamllint/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/yapf/install.sh b/src/yapf/install.sh index bc683713b..ac5e3f5e6 100755 --- a/src/yapf/install.sh +++ b/src/yapf/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='yapf' --option 
version="$VERSION" - + echo 'Done!' diff --git a/src/yapf/library_scripts.sh b/src/yapf/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/yapf/library_scripts.sh +++ b/src/yapf/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/youtube-dl/install.sh b/src/youtube-dl/install.sh index 7c53a1e9a..bb3c12f50 100755 --- a/src/youtube-dl/install.sh +++ b/src/youtube-dl/install.sh @@ -5,8 +5,8 @@ set -e # 
nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option package='youtube-dl' --option version="$VERSION" - + echo 'Done!' diff --git a/src/youtube-dl/library_scripts.sh b/src/youtube-dl/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/youtube-dl/library_scripts.sh +++ b/src/youtube-dl/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ 
ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/youtubeuploader/install.sh b/src/youtubeuploader/install.sh index f9ace71ad..beda88a9d 100755 --- a/src/youtubeuploader/install.sh +++ b/src/youtubeuploader/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.4" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ --option repo='porjo/youtubeuploader' --option binaryNames='youtubeuploader' --option version="$VERSION" - + echo 'Done!' diff --git a/src/youtubeuploader/library_scripts.sh b/src/youtubeuploader/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/youtubeuploader/library_scripts.sh +++ b/src/youtubeuploader/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/yt-dlp/install.sh b/src/yt-dlp/install.sh index 58ea66a6b..b52c6f7e2 100755 --- a/src/yt-dlp/install.sh +++ b/src/yt-dlp/install.sh @@ -5,8 +5,8 @@ set -e # nanolayer is a cli utility which keeps container layers as small as possible # source code: https://github.com/devcontainers-contrib/nanolayer -# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, -# and if missing - will download a temporary copy that automatically get deleted at the end +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end # of the script ensure_nanolayer nanolayer_location "v0.5.0" @@ -16,7 +16,7 @@ $nanolayer_location \ devcontainer-feature \ "ghcr.io/devcontainers-extra/features/pipx-package:1.1.8" \ --option 
package='yt-dlp' --option version="$VERSION" - + echo 'Done!' diff --git a/src/yt-dlp/library_scripts.sh b/src/yt-dlp/library_scripts.sh index 0d7f34d4b..ed393e86f 100644 --- a/src/yt-dlp/library_scripts.sh +++ b/src/yt-dlp/library_scripts.sh @@ -2,11 +2,11 @@ clean_download() { # The purpose of this function is to download a file with minimal impact on container layer size - # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a - # temporary manner, and making sure to + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to # 1. uninstall the downloader at the return of the function # 2. revert back any changes to the package installer database/cache (for example apt-get lists) - # The above steps will minimize the leftovers being created while installing the downloader + # The above steps will minimize the leftovers being created while installing the downloader # Supported distros: # debian/ubuntu/alpine @@ -18,8 +18,8 @@ clean_download() { _apt_get_install() { tempdir=$1 - # copy current state of apt list - in order to revert back later (minimize contianer layer size) - cp -p -R /var/lib/apt/lists $tempdir + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir apt-get update -y apt-get -y install --no-install-recommends wget ca-certificates } @@ -37,8 +37,8 @@ clean_download() { _apk_install() { tempdir=$1 - # copy current state of apk cache - in order to revert back later (minimize contianer layer size) - cp -p -R /var/cache/apk $tempdir + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir apk add --no-cache wget } @@ -47,7 +47,7 @@ clean_download() { tempdir=$1 echo "removing wget" - apk del wget + apk del wget } # try to use either wget or curl if one of them already installer if type curl >/dev/null 2>&1; then @@ -75,7 +75,7 @@ clean_download() { if [ $downloader = "wget" ] ; then wget -q $url -O $output_location else - curl -sfL $url -o $output_location + curl -sfL $url -o $output_location fi # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because @@ -89,7 +89,7 @@ clean_download() { echo "distro not supported" exit 1 fi - fi + fi } @@ -128,7 +128,7 @@ ensure_nanolayer() { fi - # If not previuse installation found, download it temporarly and delete at the end of the script + # If not previuse installation found, download it temporarly and delete at the end of the script if [ -z "${__nanolayer_location}" ]; then if [ "$(uname -sm)" = 'Linux x86_64' ] || [ "$(uname -sm)" = "Linux aarch64" ]; then @@ -141,7 +141,7 @@ ensure_nanolayer() { } trap clean_up EXIT - + if [ -x "/sbin/apk" ] ; then clib_type=musl else @@ -152,11 +152,11 @@ ensure_nanolayer() { # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename - + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" chmod a+x $tmp_dir/nanolayer __nanolayer_location=$tmp_dir/nanolayer - + else echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" diff --git a/src/zsh-plugins/install.sh b/src/zsh-plugins/install.sh index 27deafd90..3f56cb3f9 100755 --- a/src/zsh-plugins/install.sh +++ 
b/src/zsh-plugins/install.sh @@ -22,7 +22,7 @@ check_packages git ca-certificates # ensure zsh is installed if ! type zsh >/dev/null 2>&1; then check_packages zsh -fi +fi if [ "$USERNAME" = "root" ]; then USER_LOCATION="/root" @@ -56,7 +56,7 @@ cd "$currdir" || exit # create configuration file if not exists if ! [ -f "$ZSH_CONFIG" ]; then mkdir -p "$(dirname "$ZSH_CONFIG")" && touch "$ZSH_CONFIG" -fi +fi # Activate zsh plugins from PLUGINS sed -i -e "s/plugins=.*/plugins=(git ${PLUGINS})/g" "$ZSH_CONFIG" diff --git a/test/asdf-package/install_terraform_alpine.sh b/test/asdf-package/install_terraform_alpine.sh index ebf736deb..456d59618 100755 --- a/test/asdf-package/install_terraform_alpine.sh +++ b/test/asdf-package/install_terraform_alpine.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/bin/bash set -e diff --git a/test/npm-package/scenarios.json b/test/npm-package/scenarios.json index 6f2ba5ffa..1814d53a2 100644 --- a/test/npm-package/scenarios.json +++ b/test/npm-package/scenarios.json @@ -22,7 +22,7 @@ "features": { "npm-package": { "version": "8.3.1", - "package": "pnpm" + "package": "pnpm" } } }, @@ -31,7 +31,7 @@ "features": { "npm-package": { "version": "7.26.3", - "package": "pnpm" + "package": "pnpm" } } } From 23363a6889843d55aba5c0ce7f605b9631ff9729 Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Sun, 29 Sep 2024 09:34:16 +0000 Subject: [PATCH 24/38] chore(gitignore): add output dir --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index ec803e98d..1e5e275ef 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,5 @@ +output/ + # bellow is merge of vscode and node git ignores .vscode/* From f8d2cc029c06553bb828dcecf12ed92eae43011c Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Mon, 30 Sep 2024 16:55:56 +0000 Subject: [PATCH 25/38] chore: regenerate READMEs --- src/act-asdf/README.md | 8 ++++++-- src/act/README.md | 8 ++++++-- src/actionlint/README.md | 8 ++++++-- src/actions-runner/README.md | 8 ++++++-- src/activemq-sdkman/README.md | 8 ++++++-- src/akamai-cli/README.md | 8 ++++++-- src/alertmanager/README.md | 8 ++++++-- src/alp-asdf/README.md | 8 ++++++-- src/amplify-cli/README.md | 8 ++++++-- src/angular-cli/README.md | 8 ++++++-- src/ansible/README.md | 8 ++++++-- src/ant-sdkman/README.md | 8 ++++++-- src/apko/README.md | 8 ++++++-- src/apt-get-packages/README.md | 14 +++++++++----- src/apt-packages/README.md | 14 +++++++++----- src/argo-cd/README.md | 4 ++-- src/argo-workflows/README.md | 4 ++-- src/asciidoctorj-sdkman/README.md | 8 ++++++-- src/asdf-package/README.md | 5 +++-- src/assemblyscript/README.md | 8 ++++++-- src/atlantis/README.md | 8 ++++++-- src/atmos/README.md | 8 ++++++-- src/auditjs/README.md | 8 ++++++-- src/autoenv/README.md | 8 ++++++-- src/aws-cdk/README.md | 8 ++++++-- src/aws-eb-cli/README.md | 8 ++++++-- src/aztfexport/README.md | 8 ++++++-- src/azure-apiops/README.md | 8 ++++++-- src/ballerina-sdkman/README.md | 8 ++++++-- src/bandit/README.md | 8 ++++++-- src/bartib/README.md | 8 ++++++-- src/bash-command/README.md | 4 ++-- src/beehive/README.md | 8 ++++++-- src/bigcommerce-stencil-cli/README.md | 8 ++++++-- src/bikeshed/README.md | 8 ++++++-- src/bin/README.md | 8 ++++++-- src/black/README.md | 8 ++++++-- src/blackbox-exporter/README.md | 8 ++++++-- src/bomber/README.md | 8 ++++++-- src/bower/README.md | 8 ++++++-- src/bpipe-sdkman/README.md | 8 ++++++-- src/brownie/README.md | 8 ++++++-- src/browserify/README.md | 8 ++++++-- 
src/btop-homebrew/README.md | 8 ++++++-- src/btrace-sdkman/README.md | 8 ++++++-- src/budibase-cli/README.md | 8 ++++++-- src/buku/README.md | 8 ++++++-- src/caddy/README.md | 8 ++++++-- src/ccache-asdf/README.md | 8 ++++++-- src/checkov/README.md | 8 ++++++-- src/chezscheme-asdf/README.md | 8 ++++++-- src/chisel/README.md | 8 ++++++-- src/circleci-cli/README.md | 8 ++++++-- src/clojure-asdf/README.md | 8 ++++++-- src/cloud-nuke/README.md | 8 ++++++-- src/cloudflare-wrangler/README.md | 8 ++++++-- src/cloudflared-fips/README.md | 8 ++++++-- src/cloudflared/README.md | 8 ++++++-- src/cloudinary-cli/README.md | 8 ++++++-- src/codefresh-cli/README.md | 8 ++++++-- src/composer/README.md | 8 ++++++-- src/concurnas-sdkman/README.md | 8 ++++++-- src/connor-sdkman/README.md | 8 ++++++-- src/consul-asdf/README.md | 8 ++++++-- src/consul-exporter/README.md | 8 ++++++-- src/cookiecutter/README.md | 8 ++++++-- src/copier/README.md | 8 ++++++-- src/corepack/README.md | 8 ++++++-- src/cosign/README.md | 8 ++++++-- src/coverage-py/README.md | 8 ++++++-- src/crystal-asdf/README.md | 8 ++++++-- src/cuba-sdkman/README.md | 8 ++++++-- src/curl-apt-get/README.md | 13 +++++++++++-- src/curl-homebrew/README.md | 8 ++++++-- src/cve-bin-tool/README.md | 8 ++++++-- src/cxf-sdkman/README.md | 8 ++++++-- src/cyclonedx-cli/README.md | 8 ++++++-- src/cyclonedx-python/README.md | 8 ++++++-- src/cz-cli/README.md | 8 ++++++-- src/d/README.md | 4 ++-- src/dasel-asdf/README.md | 8 ++++++-- src/dashlane-cli/README.md | 8 ++++++-- src/datadog-ci-cli/README.md | 8 ++++++-- src/datasette/README.md | 8 ++++++-- src/dbt-coverage/README.md | 8 ++++++-- src/ddgr-apt-get/README.md | 13 +++++++++++-- src/ddgr-homebrew/README.md | 8 ++++++-- src/deno-asdf/README.md | 8 ++++++-- src/deno/README.md | 4 ++-- src/devcontainers-cli/README.md | 8 ++++++-- src/digitalocean-cli/README.md | 4 ++-- src/direnv-asdf/README.md | 8 ++++++-- src/direnv/README.md | 8 ++++++-- src/dive/README.md | 8 ++++++-- src/dnote/README.md | 8 ++++++-- src/doctoolchain-sdkman/README.md | 8 ++++++-- src/dprint-asdf/README.md | 8 ++++++-- src/driftctl/README.md | 8 ++++++-- src/drone-cli/README.md | 8 ++++++-- src/dua/README.md | 8 ++++++-- src/duf/README.md | 8 ++++++-- src/dufs/README.md | 8 ++++++-- src/eas-cli/README.md | 8 ++++++-- src/eget/README.md | 8 ++++++-- src/elasticsearch-asdf/README.md | 8 ++++++-- src/elm-asdf/README.md | 8 ++++++-- src/ember-cli/README.md | 8 ++++++-- src/envoy/README.md | 8 ++++++-- src/epinio/README.md | 8 ++++++-- src/etcd/README.md | 8 ++++++-- src/exa/README.md | 8 ++++++-- src/exercism-cli/README.md | 8 ++++++-- src/expo-cli/README.md | 8 ++++++-- src/express-generator/README.md | 8 ++++++-- src/fd/README.md | 8 ++++++-- src/ffmpeg-apt-get/README.md | 13 +++++++++++-- src/firebase-cli/README.md | 8 ++++++-- src/fish-apt-get/README.md | 13 +++++++++++-- src/fkill/README.md | 8 ++++++-- src/flake8/README.md | 8 ++++++-- src/flink-sdkman/README.md | 8 ++++++-- src/flit/README.md | 8 ++++++-- src/former2-cli/README.md | 8 ++++++-- src/fossil-apt-get/README.md | 13 +++++++++++-- src/fossil-homebrew/README.md | 8 ++++++-- src/fulcio/README.md | 8 ++++++-- src/fzf/README.md | 8 ++++++-- src/gaiden-sdkman/README.md | 8 ++++++-- src/ganache/README.md | 8 ++++++-- src/gdbgui/README.md | 8 ++++++-- src/gh-cli/README.md | 8 ++++++-- src/git-lfs/README.md | 8 ++++++-- src/gitmux/README.md | 8 ++++++-- src/gitomatic/README.md | 8 ++++++-- src/gitsign-credential-cache/README.md | 8 ++++++-- src/gitsign/README.md | 8 ++++++-- 
src/gitty/README.md | 8 ++++++-- src/glances/README.md | 8 ++++++-- src/gleam/README.md | 8 ++++++-- src/go-task/README.md | 8 ++++++-- src/graalvm-asdf/README.md | 8 ++++++-- src/gradle-sdkman/README.md | 8 ++++++-- src/gradleprofiler-sdkman/README.md | 8 ++++++-- src/grails-sdkman/README.md | 8 ++++++-- src/graphite-exporter/README.md | 8 ++++++-- src/groovy-sdkman/README.md | 8 ++++++-- src/groovyserv-sdkman/README.md | 8 ++++++-- src/grpcurl-asdf/README.md | 8 ++++++-- src/grype/README.md | 8 ++++++-- src/gulp-cli/README.md | 8 ++++++-- src/hadoop-sdkman/README.md | 8 ++++++-- src/haskell/README.md | 4 ++-- src/hatch/README.md | 8 ++++++-- src/haxe-asdf/README.md | 8 ++++++-- src/heroku-cli/README.md | 4 ++-- src/hotel/README.md | 8 ++++++-- src/how2/README.md | 8 ++++++-- src/http-server/README.md | 8 ++++++-- src/http4k-sdkman/README.md | 8 ++++++-- src/hyperfine/README.md | 8 ++++++-- src/immuadmin-fips/README.md | 8 ++++++-- src/immuadmin/README.md | 8 ++++++-- src/immuclient-fips/README.md | 8 ++++++-- src/immuclient/README.md | 8 ++++++-- src/immudb-fips/README.md | 8 ++++++-- src/immudb/README.md | 8 ++++++-- src/infracost/README.md | 8 ++++++-- src/infrastructor-sdkman/README.md | 8 ++++++-- src/invoke/README.md | 8 ++++++-- src/ionic-cli/README.md | 8 ++++++-- src/isort/README.md | 8 ++++++-- src/istioctl/README.md | 8 ++++++-- src/jake/README.md | 8 ++++++-- src/jbake-sdkman/README.md | 8 ++++++-- src/jbang-sdkman/README.md | 8 ++++++-- src/jenkinsx-cli/README.md | 8 ++++++-- src/jest/README.md | 8 ++++++-- src/jfrog-cli-homebrew/README.md | 8 ++++++-- src/jfrog-cli-npm/README.md | 8 ++++++-- src/jfrog-cli/README.md | 13 +++++++++++-- src/jira-cli/README.md | 8 ++++++-- src/jmc-sdkman/README.md | 8 ++++++-- src/jmeter-sdkman/README.md | 8 ++++++-- src/joern-sdkman/README.md | 8 ++++++-- src/jreleaser-sdkman/README.md | 8 ++++++-- src/jrnl/README.md | 8 ++++++-- src/jshint/README.md | 8 ++++++-- src/jsii-diff/README.md | 8 ++++++-- src/jsii-pacmak/README.md | 8 ++++++-- src/jsii-rosetta/README.md | 8 ++++++-- src/jsii/README.md | 8 ++++++-- src/json-server/README.md | 8 ++++++-- src/k2tf/README.md | 8 ++++++-- src/k6/README.md | 8 ++++++-- src/karaf-sdkman/README.md | 8 ++++++-- src/keepercommander/README.md | 8 ++++++-- src/ki-sdkman/README.md | 8 ++++++-- src/kind/README.md | 8 ++++++-- src/kobweb-sdkman/README.md | 8 ++++++-- src/kops/README.md | 8 ++++++-- src/kotlin-sdkman/README.md | 8 ++++++-- src/kscript-sdkman/README.md | 8 ++++++-- src/kubeclarity-cli/README.md | 8 ++++++-- src/kubectl-asdf/README.md | 8 ++++++-- src/kubectx-kubens/README.md | 8 ++++++-- src/kubie/README.md | 4 ++-- src/kyverno-cli/README.md | 8 ++++++-- src/lastpass-cli-homebrew/README.md | 8 ++++++-- src/layrry-sdkman/README.md | 8 ++++++-- src/lean-asdf/README.md | 8 ++++++-- src/lefthook-asdf/README.md | 8 ++++++-- src/leiningen-sdkman/README.md | 8 ++++++-- src/lektor/README.md | 8 ++++++-- src/lerna-npm/README.md | 8 ++++++-- src/less/README.md | 8 ++++++-- src/levant-asdf/README.md | 8 ++++++-- src/lighthouse-cli/README.md | 8 ++++++-- src/linkerd2-cli-edge/README.md | 8 ++++++-- src/linkerd2-cli-stable/README.md | 8 ++++++-- src/linode-cli/README.md | 8 ++++++-- src/lite-server/README.md | 8 ++++++-- src/live-server/README.md | 8 ++++++-- src/localstack/README.md | 8 ++++++-- src/localtunnel-npm/README.md | 8 ++++++-- src/mackup/README.md | 8 ++++++-- src/markdownlint-cli/README.md | 8 ++++++-- src/markdownlint-cli2/README.md | 8 ++++++-- src/maven-sdkman/README.md | 8 ++++++-- 
src/meltano/README.md | 8 ++++++-- src/memcached-exporter/README.md | 8 ++++++-- src/micro/README.md | 13 +++++++++++-- src/micronaut-sdkman/README.md | 8 ++++++-- src/mitmproxy/README.md | 8 ++++++-- src/mkcert/README.md | 8 ++++++-- src/mkdocs/README.md | 8 ++++++-- src/mlocate-apt-get/README.md | 13 +++++++++++-- src/mlton-asdf/README.md | 8 ++++++-- src/mocha/README.md | 8 ++++++-- src/mongodb-atlas-cli-homebrew/README.md | 8 ++++++-- src/mongosh-homebrew/README.md | 8 ++++++-- src/mosh-apt-get/README.md | 13 +++++++++++-- src/mosh-homebrew/README.md | 8 ++++++-- src/mulefd-sdkman/README.md | 8 ++++++-- src/mvnd-sdkman/README.md | 8 ++++++-- src/mybatis-sdkman/README.md | 8 ++++++-- src/mypy/README.md | 8 ++++++-- src/mysql-homebrew/README.md | 8 ++++++-- src/mysqld-exporter/README.md | 8 ++++++-- src/n8n/README.md | 8 ++++++-- src/nancy/README.md | 8 ++++++-- src/navi/README.md | 8 ++++++-- src/ncdu/README.md | 13 +++++++++++-- src/neko-asdf/README.md | 8 ++++++-- src/neo4jmigrations-sdkman/README.md | 8 ++++++-- src/neofetch/README.md | 13 +++++++++++-- src/neovim-apt-get/README.md | 13 +++++++++++-- src/neovim-homebrew/README.md | 8 ++++++-- src/nestjs-cli/README.md | 8 ++++++-- src/netdata/README.md | 13 +++++++++++-- src/netlify-cli/README.md | 8 ++++++-- src/nim-asdf/README.md | 8 ++++++-- src/ninja-asdf/README.md | 8 ++++++-- src/nmap-apt-get/README.md | 13 +++++++++++-- src/nmap-homebrew/README.md | 8 ++++++-- src/nnn-apt-get/README.md | 13 +++++++++++-- src/nnn-homebrew/README.md | 8 ++++++-- src/node-asdf/README.md | 8 ++++++-- src/node-exporter/README.md | 8 ++++++-- src/nomad-asdf/README.md | 8 ++++++-- src/nox/README.md | 8 ++++++-- src/npm-package/README.md | 8 ++++---- src/nx-npm/README.md | 8 ++++++-- src/ocaml-asdf/README.md | 8 ++++++-- src/oclif/README.md | 8 ++++++-- src/opa/README.md | 8 ++++++-- src/opam-asdf/README.md | 8 ++++++-- src/ory-cli/README.md | 8 ++++++-- src/ory-hydra/README.md | 8 ++++++-- src/ory-kratos/README.md | 8 ++++++-- src/ory-oathkeeper/README.md | 8 ++++++-- src/packer-asdf/README.md | 8 ++++++-- src/pandoc/README.md | 8 ++++++-- src/pass-apt-get/README.md | 13 +++++++++++-- src/pdm/README.md | 8 ++++++-- src/peco-asdf/README.md | 8 ++++++-- src/perl-asdf/README.md | 8 ++++++-- src/pierrot-sdkman/README.md | 8 ++++++-- src/pipenv/README.md | 8 ++++++-- src/pipx-package/README.md | 4 +++- src/pnpm/README.md | 8 ++++++-- src/podman-homebrew/README.md | 8 ++++++-- src/poetry/README.md | 8 ++++++-- src/pomchecker-sdkman/README.md | 8 ++++++-- src/poppler-utils-apt-get/README.md | 13 +++++++++++-- src/powerbi-visuals-tools/README.md | 8 ++++++-- src/powershell/README.md | 8 ++++++-- src/pre-commit/README.md | 8 ++++++-- src/prettier/README.md | 8 ++++++-- src/prisma/README.md | 8 ++++++-- src/projen/README.md | 8 ++++++-- src/prometheus/README.md | 8 ++++++-- src/promlens/README.md | 8 ++++++-- src/protoc-asdf/README.md | 8 ++++++-- src/protoc/README.md | 8 ++++++-- src/pulumi/README.md | 4 ++-- src/pushgateway/README.md | 8 ++++++-- src/pyinfra/README.md | 8 ++++++-- src/pylint/README.md | 8 ++++++-- src/pyoxidizer/README.md | 8 ++++++-- src/pyscaffold/README.md | 8 ++++++-- src/qrcode/README.md | 8 ++++++-- src/quarkus-sdkman/README.md | 8 ++++++-- src/quasar-cli/README.md | 8 ++++++-- src/raku-asdf/README.md | 8 ++++++-- src/rclone/README.md | 8 ++++++-- src/redis-homebrew/README.md | 8 ++++++-- src/rekor-cli/README.md | 8 ++++++-- src/renovate-cli/README.md | 8 ++++++-- src/ripgrep/README.md | 8 ++++++-- src/rollup/README.md | 8 ++++++-- 
src/ruby-asdf/README.md | 8 ++++++-- src/ruff/README.md | 8 ++++++-- src/salesforce-cli/README.md | 8 ++++++-- src/salesforce-sfdx/README.md | 8 ++++++-- src/sanity-cli/README.md | 8 ++++++-- src/sap-piper/README.md | 8 ++++++-- src/sbt-sdkman/README.md | 8 ++++++-- src/scala-sdkman/README.md | 8 ++++++-- src/scalacli-sdkman/README.md | 8 ++++++-- src/scancode-toolkit/README.md | 8 ++++++-- src/schemacrawler-sdkman/README.md | 8 ++++++-- src/sentinel-asdf/README.md | 8 ++++++-- src/serf-asdf/README.md | 8 ++++++-- src/shfmt/README.md | 10 ++++++++-- src/shopify-cli/README.md | 8 ++++++-- src/sigstore-python/README.md | 8 ++++++-- src/snyk-cli/README.md | 8 ++++++-- src/sops/README.md | 8 ++++++-- src/spacectl/README.md | 8 ++++++-- src/spark-sdkman/README.md | 8 ++++++-- src/spicedb/README.md | 8 ++++++-- src/springboot-sdkman/README.md | 8 ++++++-- src/sqlfluff/README.md | 8 ++++++-- src/squarespace-server/README.md | 8 ++++++-- src/sshoogr-sdkman/README.md | 8 ++++++-- src/starship-homebrew/README.md | 8 ++++++-- src/starship/README.md | 8 ++++++-- src/statsd-exporter/README.md | 8 ++++++-- src/stew/README.md | 8 ++++++-- src/supabase-cli/README.md | 8 ++++++-- src/surge-cli/README.md | 8 ++++++-- src/sv2v/README.md | 8 ++++++-- src/svu-asdf/README.md | 8 ++++++-- src/syft/README.md | 8 ++++++-- src/syntaqx-serve/README.md | 8 ++++++-- src/tailscale/README.md | 13 +++++++++++-- src/taxi-sdkman/README.md | 8 ++++++-- src/tea/README.md | 8 ++++++-- src/tekton-cli/README.md | 8 ++++++-- src/tempo/README.md | 8 ++++++-- src/temporal-cli/README.md | 8 ++++++-- src/terracognita/README.md | 8 ++++++-- src/terraform-asdf/README.md | 8 ++++++-- src/terraform-docs/README.md | 8 ++++++-- src/terraform-ls-asdf/README.md | 8 ++++++-- src/terraformer/README.md | 8 ++++++-- src/terragrunt/README.md | 8 ++++++-- src/terramate/README.md | 8 ++++++-- src/terrascan/README.md | 4 ++-- src/tfc-agent-asdf/README.md | 8 ++++++-- src/tfcdk-cli/README.md | 8 ++++++-- src/tfenv-homebrew/README.md | 8 ++++++-- src/tfsec/README.md | 8 ++++++-- src/tfswitch/README.md | 13 +++++++++++-- src/tldr/README.md | 8 ++++++-- src/tmate/README.md | 13 +++++++++++-- src/tmux-apt-get/README.md | 13 +++++++++++-- src/tmux-homebrew/README.md | 8 ++++++-- src/tomcat-sdkman/README.md | 8 ++++++-- src/tooljet-cli/README.md | 8 ++++++-- src/toolkit-sdkman/README.md | 8 ++++++-- src/tox/README.md | 8 ++++++-- src/trello-cli/README.md | 8 ++++++-- src/tridentctl-asdf/README.md | 8 ++++++-- src/truffle/README.md | 8 ++++++-- src/ts-node/README.md | 8 ++++++-- src/tsx/README.md | 8 ++++++-- src/turborepo-npm/README.md | 8 ++++++-- src/twine/README.md | 8 ++++++-- src/typescript/README.md | 8 ++++++-- src/typst/README.md | 8 ++++++-- src/ufmt/README.md | 8 ++++++-- src/upx/README.md | 8 ++++++-- src/vault-asdf/README.md | 8 ++++++-- src/vercel-cli/README.md | 8 ++++++-- src/vercel-ncc/README.md | 8 ++++++-- src/vercel-pkg/README.md | 8 ++++++-- src/vercel-release/README.md | 8 ++++++-- src/vercel-serve/README.md | 8 ++++++-- src/vscode-cli/README.md | 13 +++++++------ src/vscode-server/README.md | 10 ++++++---- src/vtop/README.md | 8 ++++++-- src/vue-cli/README.md | 8 ++++++-- src/vulture/README.md | 8 ++++++-- src/w3m-apt-get/README.md | 13 +++++++++++-- src/w3m-homebrew/README.md | 8 ++++++-- src/waypoint-asdf/README.md | 8 ++++++-- src/webtau-sdkman/README.md | 8 ++++++-- src/wget-apt-get/README.md | 13 +++++++++++-- src/wget-homebrew/README.md | 8 ++++++-- src/wireguard-apt-get/README.md | 13 +++++++++++-- src/xmrig/README.md | 8 
++++++-- src/xonsh/README.md | 8 ++++++-- src/yamllint/README.md | 8 ++++++-- src/yapf/README.md | 8 ++++++-- src/youtube-dl/README.md | 8 ++++++-- src/youtubeuploader/README.md | 8 ++++++-- src/yt-dlp/README.md | 8 ++++++-- src/zig/README.md | 4 ++-- src/zsh-plugins/README.md | 4 ++-- 420 files changed, 2589 insertions(+), 853 deletions(-) mode change 100755 => 100644 src/brownie/README.md diff --git a/src/act-asdf/README.md b/src/act-asdf/README.md index 5f164314c..b9be9bd5f 100644 --- a/src/act-asdf/README.md +++ b/src/act-asdf/README.md @@ -3,11 +3,11 @@ Act is an open source project that allows you to run your github flow locally. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/act-asdf:2": {} + "ghcr.io/devcontainers-extra/features/act-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Act is an open source project that allows you to run your github flow locally. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/act/README.md b/src/act/README.md index a0a06f7d8..207d3eccd 100644 --- a/src/act/README.md +++ b/src/act/README.md @@ -3,11 +3,11 @@ Act is an open source project that allows you to run your github flow locally. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/act:1": {} + "ghcr.io/devcontainers-extra/features/act:1": {} } ``` @@ -18,3 +18,7 @@ Act is an open source project that allows you to run your github flow locally. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/actionlint/README.md b/src/actionlint/README.md index 9ab9f40a9..3635b59ec 100644 --- a/src/actionlint/README.md +++ b/src/actionlint/README.md @@ -3,11 +3,11 @@ Static checker for GitHub Actions workflow files. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/actionlint:1": {} + "ghcr.io/devcontainers-extra/features/actionlint:1": {} } ``` @@ -18,3 +18,7 @@ Static checker for GitHub Actions workflow files. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/actions-runner/README.md b/src/actions-runner/README.md index fb99f2d9b..3aa601743 100644 --- a/src/actions-runner/README.md +++ b/src/actions-runner/README.md @@ -3,11 +3,11 @@ The runner is the application that runs a job from a GitHub Actions workflow. It is used by GitHub Actions in the hosted virtual environments, or you can self-host the runner in your own environment. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/actions-runner:1": {} + "ghcr.io/devcontainers-extra/features/actions-runner:1": {} } ``` @@ -19,3 +19,7 @@ The runner is the application that runs a job from a GitHub Actions workflow. It | dotnetVersion | Select the dotnet version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/activemq-sdkman/README.md b/src/activemq-sdkman/README.md index 2fba4775d..890f17781 100644 --- a/src/activemq-sdkman/README.md +++ b/src/activemq-sdkman/README.md @@ -10,11 +10,11 @@ messages between your web applications using STOMP over websockets. Manage your IoT devices using MQTT. Support your existing JMS infrastructure and beyond. ActiveMQ offers the power and flexibility to support any messaging use-case. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/activemq-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/activemq-sdkman:2": {} } ``` @@ -27,3 +27,7 @@ ActiveMQ offers the power and flexibility to support any messaging use-case. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/akamai-cli/README.md b/src/akamai-cli/README.md index ea19655fa..24be7098e 100644 --- a/src/akamai-cli/README.md +++ b/src/akamai-cli/README.md @@ -3,11 +3,11 @@ Use Akamai CLI to configure Akamai platform and products directly from the command line. You can install ready-to-use product packages or build your own custom solutions to manage from CLI. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/akamai-cli:1": {} + "ghcr.io/devcontainers-extra/features/akamai-cli:1": {} } ``` @@ -18,3 +18,7 @@ Use Akamai CLI to configure Akamai platform and products directly from the comma | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/alertmanager/README.md b/src/alertmanager/README.md index f3cdb50e8..9c10765be 100644 --- a/src/alertmanager/README.md +++ b/src/alertmanager/README.md @@ -3,11 +3,11 @@ The Alertmanager handles alerts sent by client applications such as the Prometheus server. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/alertmanager:1": {} + "ghcr.io/devcontainers-extra/features/alertmanager:1": {} } ``` @@ -18,3 +18,7 @@ The Alertmanager handles alerts sent by client applications such as the Promethe | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/alp-asdf/README.md b/src/alp-asdf/README.md index af733ec62..cb0046bdf 100644 --- a/src/alp-asdf/README.md +++ b/src/alp-asdf/README.md @@ -3,11 +3,11 @@ Installs alp -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/alp-asdf:2": {} + "ghcr.io/devcontainers-extra/features/alp-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Installs alp | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/amplify-cli/README.md b/src/amplify-cli/README.md index 93ba26639..586999009 100644 --- a/src/amplify-cli/README.md +++ b/src/amplify-cli/README.md @@ -3,11 +3,11 @@ The Amplify Command Line Interface (CLI) is a unified toolchain to create AWS cloud services for your app. Let's go ahead and install the Amplify CLI. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/amplify-cli:2": {} + "ghcr.io/devcontainers-extra/features/amplify-cli:2": {} } ``` @@ -18,3 +18,7 @@ The Amplify Command Line Interface (CLI) is a unified toolchain to create AWS cl | version | Select the version of AWS Amplify CLI to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/angular-cli/README.md b/src/angular-cli/README.md index 68f58dd8a..c5c60b857 100644 --- a/src/angular-cli/README.md +++ b/src/angular-cli/README.md @@ -3,11 +3,11 @@ Angular CLI is a command-line interface tool that you use to initialize, develop, scaffold, and maintain Angular applications directly from a command shell. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/angular-cli:2": {} + "ghcr.io/devcontainers-extra/features/angular-cli:2": {} } ``` @@ -18,3 +18,7 @@ Angular CLI is a command-line interface tool that you use to initialize, develop | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ansible/README.md b/src/ansible/README.md index 3d5f28d4f..f899a6023 100644 --- a/src/ansible/README.md +++ b/src/ansible/README.md @@ -3,11 +3,11 @@ Ansible is a suite of software tools that enables infrastructure as code. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ansible:2": {} + "ghcr.io/devcontainers-extra/features/ansible:2": {} } ``` @@ -18,3 +18,7 @@ Ansible is a suite of software tools that enables infrastructure as code. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ant-sdkman/README.md b/src/ant-sdkman/README.md index ed74f7c77..40fb5c786 100644 --- a/src/ant-sdkman/README.md +++ b/src/ant-sdkman/README.md @@ -10,11 +10,11 @@ applications, for instance C or C++ applications. More generally, Ant can be used to pilot any type of process which can be described in terms of targets and tasks. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ant-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/ant-sdkman:2": {} } ``` @@ -27,3 +27,7 @@ tasks. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/apko/README.md b/src/apko/README.md index 712fca39c..2911098f4 100644 --- a/src/apko/README.md +++ b/src/apko/README.md @@ -3,11 +3,11 @@ apko is an apk-based OCI image builder -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/apko:1": {} + "ghcr.io/devcontainers-extra/features/apko:1": {} } ``` @@ -18,3 +18,7 @@ apko is an apk-based OCI image builder | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/apt-get-packages/README.md b/src/apt-get-packages/README.md index aa65cc510..514c65d15 100644 --- a/src/apt-get-packages/README.md +++ b/src/apt-get-packages/README.md @@ -3,11 +3,11 @@ apt-get is a command line tool for interacting with the Advanced Package Tool (APT) library (a package management system for Linux distributions). -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/apt-get-packages:1": {} + "ghcr.io/devcontainers-extra/features/apt-get-packages:1": {} } ``` @@ -17,8 +17,12 @@ apt-get is a command line tool for interacting with the Advanced Package Tool (A |-----|-----|-----|-----| | packages | Comma separated list of packages. | string | - | | ppas | Comma separated list of required ppas (not recommended) | string | - | -| clean_ppas | If ppas used, remove them after usage in order to increase safety | boolean | True | -| preserve_apt_list | Preserving the existing apt list (the result of apt-get update) will decrease container layer size (but will require a re-update of the packages database on next install) | boolean | True | -| force_ppas_on_non_ubuntu | Installing ppas on a non-ubuntu distro (eg. debian) is discouraged | boolean | - | +| clean_ppas | If ppas used, remove them after usage in order to increase safety | boolean | true | +| preserve_apt_list | Preserving the existing apt list (the result of apt-get update) will decrease container layer size (but will require a re-update of the packages database on next install) | boolean | true | +| force_ppas_on_non_ubuntu | Installing ppas on a non-ubuntu distro (eg. debian) is discouraged | boolean | false | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/apt-packages/README.md b/src/apt-packages/README.md index e59b6b64d..1bc68a803 100644 --- a/src/apt-packages/README.md +++ b/src/apt-packages/README.md @@ -3,11 +3,11 @@ Advanced package tool, or APT, is a free-software user interface that works with core libraries to handle the installation and removal of software on Debian and Debian-based Linux distributions. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/apt-packages:1": {} + "ghcr.io/devcontainers-extra/features/apt-packages:1": {} } ``` @@ -17,8 +17,12 @@ Advanced package tool, or APT, is a free-software user interface that works with |-----|-----|-----|-----| | packages | Comma separated list of packages. 
| string | - | | ppas | Comma separated list of required ppas (not recommended) | string | - | -| clean_ppas | If ppas used, remove them after usage in order to increase safety | boolean | True | -| preserve_apt_list | Preserving the existing apt list (the result of apt-get update) will decrease container layer size (but will require a re-update of the packages database on next install) | boolean | True | -| force_ppas_on_non_ubuntu | Installing ppas on a non-ubuntu distro (eg. debian) is discouraged | boolean | - | +| clean_ppas | If ppas used, remove them after usage in order to increase safety | boolean | true | +| preserve_apt_list | Preserving the existing apt list (the result of apt-get update) will decrease container layer size (but will require a re-update of the packages database on next install) | boolean | true | +| force_ppas_on_non_ubuntu | Installing ppas on a non-ubuntu distro (eg. debian) is discouraged | boolean | false | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/argo-cd/README.md b/src/argo-cd/README.md index c49f464ca..a73eff5cf 100644 --- a/src/argo-cd/README.md +++ b/src/argo-cd/README.md @@ -7,7 +7,7 @@ Declarative GitOps CD for Kubernetes ```json "features": { - "ghcr.io/devcontainers-contrib/features/argo-cd:1": {} + "ghcr.io/devcontainers-extra/features/argo-cd:1": {} } ``` @@ -21,4 +21,4 @@ Declarative GitOps CD for Kubernetes --- -_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers-contrib/features/blob/main/src/argo-cd/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/argo-workflows/README.md b/src/argo-workflows/README.md index f54e744de..1fd16a095 100644 --- a/src/argo-workflows/README.md +++ b/src/argo-workflows/README.md @@ -7,7 +7,7 @@ Argo Workflows is an open source container-native workflow engine for orchestrat ```json "features": { - "ghcr.io/devcontainers-contrib/features/argo-workflows:1": {} + "ghcr.io/devcontainers-extra/features/argo-workflows:1": {} } ``` @@ -21,4 +21,4 @@ Argo Workflows is an open source container-native workflow engine for orchestrat --- -_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers-contrib/features/blob/main/src/argo-workflows/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/asciidoctorj-sdkman/README.md b/src/asciidoctorj-sdkman/README.md index ac7f56efa..54452edea 100644 --- a/src/asciidoctorj-sdkman/README.md +++ b/src/asciidoctorj-sdkman/README.md @@ -5,11 +5,11 @@ AsciidoctorJ is the official library for running Asciidoctor on the JVM. Using AsciidoctorJ, you can convert AsciiDoc content or analyze the structure of a parsed AsciiDoc document from Java and other JVM languages. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/asciidoctorj-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/asciidoctorj-sdkman:2": {} } ``` @@ -22,3 +22,7 @@ parsed AsciiDoc document from Java and other JVM languages. 
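
For reference, a minimal sketch of how the apt-get-packages options documented in the tables above might be combined in a devcontainer.json fragment; the package list is illustrative, and the three booleans simply spell out the lowercase true/false defaults that the corrected table now shows (matching the JSON booleans a user would actually write):

```json
"features": {
    "ghcr.io/devcontainers-extra/features/apt-get-packages:1": {
        "packages": "jq,ripgrep",
        "clean_ppas": true,
        "preserve_apt_list": true,
        "force_ppas_on_non_ubuntu": false
    }
}
```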
| jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/asdf-package/README.md b/src/asdf-package/README.md index 71106159b..82b78eb51 100644 --- a/src/asdf-package/README.md +++ b/src/asdf-package/README.md @@ -7,7 +7,7 @@ Installs an asdf package. ```json "features": { - "ghcr.io/devcontainers-contrib/features/asdf-package:1": {} + "ghcr.io/devcontainers-extra/features/asdf-package:1": {} } ``` @@ -18,9 +18,10 @@ Installs an asdf package. | plugin | Select the asdf plugin to install. | string | - | | version | Select the version of the asdf plugin to install. | string | latest | | pluginRepo | Select the asdf plugin repo to install (can remain empty in order to use the plugin short-name index https://github.com/asdf-vm/asdf-plugins) | string | - | +| latestVersionPattern | this will be used as argument for 'asdf latest ' command. If none given the default asdf of '[0-9]' will be used. | string | - | --- -_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers-contrib/features/blob/main/src/asdf-package/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/assemblyscript/README.md b/src/assemblyscript/README.md index 2dd63dc66..13d1dd18a 100644 --- a/src/assemblyscript/README.md +++ b/src/assemblyscript/README.md @@ -3,11 +3,11 @@ AssemblyScript compiles a variant of TypeScript to WebAssembly using Binaryen. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/assemblyscript:2": {} + "ghcr.io/devcontainers-extra/features/assemblyscript:2": {} } ``` @@ -18,3 +18,7 @@ AssemblyScript compiles a variant of TypeScript to WebAssembly using Binaryen. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/atlantis/README.md b/src/atlantis/README.md index 067aa0219..66272611a 100644 --- a/src/atlantis/README.md +++ b/src/atlantis/README.md @@ -3,11 +3,11 @@ Atlantis is a self-hosted golang application that listens for Terraform pull request events via webhooks. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/atlantis:1": {} + "ghcr.io/devcontainers-extra/features/atlantis:1": {} } ``` @@ -18,3 +18,7 @@ Atlantis is a self-hosted golang application that listens for Terraform pull req | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/atmos/README.md b/src/atmos/README.md index bd343e528..1aa3bfbd6 100644 --- a/src/atmos/README.md +++ b/src/atmos/README.md @@ -3,11 +3,11 @@ Atmos is a workflow automation tool. Atmos is lets you take a complex workflow and turn it into a parameterized CLI. You can then run the CLI anywhere, such as locally or in CI/CD. 
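
The asdf-package hunk above introduces a `latestVersionPattern` option that is passed to `asdf latest <plugin>`. A hedged sketch of combining it with the existing options follows; the plugin name and pattern are illustrative values, not taken from this diff:

```json
"features": {
    "ghcr.io/devcontainers-extra/features/asdf-package:1": {
        "plugin": "golang",
        "version": "latest",
        "latestVersionPattern": "1.22"
    }
}
```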
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/atmos:1": {} + "ghcr.io/devcontainers-extra/features/atmos:1": {} } ``` @@ -18,3 +18,7 @@ Atmos is a workflow automation tool. Atmos is lets you take a complex workflow a | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/auditjs/README.md b/src/auditjs/README.md index 99f20d819..14503a0e6 100644 --- a/src/auditjs/README.md +++ b/src/auditjs/README.md @@ -3,11 +3,11 @@ AuditJS audits an NPM package.json file to identify known vulnerabilities. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/auditjs:1": {} + "ghcr.io/devcontainers-extra/features/auditjs:1": {} } ``` @@ -18,3 +18,7 @@ AuditJS audits an NPM package.json file to identify known vulnerabilities. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/autoenv/README.md b/src/autoenv/README.md index 3cd0f2f4c..344d9676b 100644 --- a/src/autoenv/README.md +++ b/src/autoenv/README.md @@ -3,11 +3,11 @@ Autoenv is a tool which automatically reads and setups configuration from a . env file when you cd to a directory. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/autoenv:1": {} + "ghcr.io/devcontainers-extra/features/autoenv:1": {} } ``` @@ -18,3 +18,7 @@ Autoenv is a tool which automatically reads and setups configuration from a . en | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/aws-cdk/README.md b/src/aws-cdk/README.md index dddf52512..6f80b7422 100644 --- a/src/aws-cdk/README.md +++ b/src/aws-cdk/README.md @@ -3,11 +3,11 @@ AWS CDK is an open-source software development framework to define cloud infrastructure in code and provision it through AWS CloudFormation. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/aws-cdk:2": {} + "ghcr.io/devcontainers-extra/features/aws-cdk:2": {} } ``` @@ -18,3 +18,7 @@ AWS CDK is an open-source software development framework to define cloud infrast | version | Select the version of AWS Cloud Development Kit (AWS CDK) to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/aws-eb-cli/README.md b/src/aws-eb-cli/README.md index 51fcf3f14..289485bc0 100644 --- a/src/aws-eb-cli/README.md +++ b/src/aws-eb-cli/README.md @@ -3,11 +3,11 @@ AWS Elastic Beanstalk CLI. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/aws-eb-cli:1": {} + "ghcr.io/devcontainers-extra/features/aws-eb-cli:1": {} } ``` @@ -18,3 +18,7 @@ AWS Elastic Beanstalk CLI. | version | Select the version of aws-eb-cli to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/aztfexport/README.md b/src/aztfexport/README.md index daa4a574f..5e42671d8 100644 --- a/src/aztfexport/README.md +++ b/src/aztfexport/README.md @@ -3,11 +3,11 @@ aztfexport is a tool to bring existing Azure resources under Terraform's management. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/aztfexport:1": {} + "ghcr.io/devcontainers-extra/features/aztfexport:1": {} } ``` @@ -18,3 +18,7 @@ aztfexport is a tool to bring existing Azure resources under Terraform's managem | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/azure-apiops/README.md b/src/azure-apiops/README.md index d1ec10887..4a0678942 100644 --- a/src/azure-apiops/README.md +++ b/src/azure-apiops/README.md @@ -3,11 +3,11 @@ APIOps applies the concepts of GitOps and DevOps to API deployment. By using practices from these two methodologies, APIOps can enable everyone involved in the lifecycle of API design, development, and deployment with self-service and automated tools to ensure the quality of the specifications and APIs that theyre building. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/azure-apiops:1": {} + "ghcr.io/devcontainers-extra/features/azure-apiops:1": {} } ``` @@ -18,3 +18,7 @@ APIOps applies the concepts of GitOps and DevOps to API deployment. By using pra | version | Select the version you would like to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ballerina-sdkman/README.md b/src/ballerina-sdkman/README.md index 15bccb909..90ea3dee6 100644 --- a/src/ballerina-sdkman/README.md +++ b/src/ballerina-sdkman/README.md @@ -4,11 +4,11 @@ Open source programming language and platform for cloud-era application programmers to easily write software that just works. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ballerina-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/ballerina-sdkman:2": {} } ``` @@ -21,3 +21,7 @@ programmers to easily write software that just works. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/bandit/README.md b/src/bandit/README.md index 0239c9942..2d6db8468 100644 --- a/src/bandit/README.md +++ b/src/bandit/README.md @@ -3,11 +3,11 @@ Bandit is a tool designed to find common security issues in Python code. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/bandit:2": {} + "ghcr.io/devcontainers-extra/features/bandit:2": {} } ``` @@ -18,3 +18,7 @@ Bandit is a tool designed to find common security issues in Python code. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/bartib/README.md b/src/bartib/README.md index 0dfaa856d..2b08765d5 100644 --- a/src/bartib/README.md +++ b/src/bartib/README.md @@ -3,11 +3,11 @@ Bartib is a simple timetracker for the command line. It saves a log of all tracked activities as a plaintext file and allows you to create flexible reports. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/bartib:1": {} + "ghcr.io/devcontainers-extra/features/bartib:1": {} } ``` @@ -18,3 +18,7 @@ Bartib is a simple timetracker for the command line. It saves a log of all track | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/bash-command/README.md b/src/bash-command/README.md index 92405a68b..be91e04ad 100644 --- a/src/bash-command/README.md +++ b/src/bash-command/README.md @@ -7,7 +7,7 @@ Executes a bash command ```json "features": { - "ghcr.io/devcontainers-contrib/features/bash-command:1": {} + "ghcr.io/devcontainers-extra/features/bash-command:1": {} } ``` @@ -21,4 +21,4 @@ Executes a bash command --- -_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers-contrib/features/blob/main/src/bash-command/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/beehive/README.md b/src/beehive/README.md index 8f8196b7e..b7c17816b 100644 --- a/src/beehive/README.md +++ b/src/beehive/README.md @@ -3,11 +3,11 @@ Beehive is an event and agent system, which allows you to create your own agents that perform automated tasks triggered by events and filters. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/beehive:1": {} + "ghcr.io/devcontainers-extra/features/beehive:1": {} } ``` @@ -18,3 +18,7 @@ Beehive is an event and agent system, which allows you to create your own agents | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/bigcommerce-stencil-cli/README.md b/src/bigcommerce-stencil-cli/README.md index 2260cf801..be574cedf 100644 --- a/src/bigcommerce-stencil-cli/README.md +++ b/src/bigcommerce-stencil-cli/README.md @@ -3,11 +3,11 @@ Stencil CLI gives developers the power to locally edit and preview themes with no impact to a merchant’s live storefront, and it's built-in Browsersync(opens in a new tab) capabilities make simultaneous testing across desktop, mobile, and tablet devices a breeze. Once work is complete, developers can push themes to BigCommerce storefronts (and set them live) using Stencil CLI's simple, yet powerful commands. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/bigcommerce-stencil-cli:1": {} + "ghcr.io/devcontainers-extra/features/bigcommerce-stencil-cli:1": {} } ``` @@ -18,3 +18,7 @@ Stencil CLI gives developers the power to locally edit and preview themes with n | version | Select the version to install. 
| string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/bikeshed/README.md b/src/bikeshed/README.md index f0f749f1f..ee2d85627 100644 --- a/src/bikeshed/README.md +++ b/src/bikeshed/README.md @@ -3,11 +3,11 @@ A preprocessor for anyone writing specifications that converts source files into actual specs. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/bikeshed:2": {} + "ghcr.io/devcontainers-extra/features/bikeshed:2": {} } ``` @@ -18,3 +18,7 @@ A preprocessor for anyone writing specifications that converts source files into | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/bin/README.md b/src/bin/README.md index f290e39fa..0e9b20abc 100644 --- a/src/bin/README.md +++ b/src/bin/README.md @@ -3,11 +3,11 @@ bin is an effortless binary manager. It manages binary files downloaded from different sources. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/bin:1": {} + "ghcr.io/devcontainers-extra/features/bin:1": {} } ``` @@ -18,3 +18,7 @@ bin is an effortless binary manager. It manages binary files downloaded from dif | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/black/README.md b/src/black/README.md index 3b5a1f5a8..97be09f5c 100644 --- a/src/black/README.md +++ b/src/black/README.md @@ -3,11 +3,11 @@ Black is an uncompromising Python code formatter. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/black:2": {} + "ghcr.io/devcontainers-extra/features/black:2": {} } ``` @@ -18,3 +18,7 @@ Black is an uncompromising Python code formatter. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/blackbox-exporter/README.md b/src/blackbox-exporter/README.md index 12038e6e9..858a7b2b3 100644 --- a/src/blackbox-exporter/README.md +++ b/src/blackbox-exporter/README.md @@ -3,11 +3,11 @@ The blackbox exporter allows blackbox probing of endpoints over HTTP, HTTPS, DNS, TCP, ICMP and gRPC. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/blackbox-exporter:1": {} + "ghcr.io/devcontainers-extra/features/blackbox-exporter:1": {} } ``` @@ -18,3 +18,7 @@ The blackbox exporter allows blackbox probing of endpoints over HTTP, HTTPS, DNS | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/bomber/README.md b/src/bomber/README.md index ee6b1fccb..e7bf265b2 100644 --- a/src/bomber/README.md +++ b/src/bomber/README.md @@ -3,11 +3,11 @@ bomber is an application that scans SBOMs for security vulnerabilities. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/bomber:1": {} + "ghcr.io/devcontainers-extra/features/bomber:1": {} } ``` @@ -18,3 +18,7 @@ bomber is an application that scans SBOMs for security vulnerabilities. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/bower/README.md b/src/bower/README.md index 93a8dcb18..95c0efba8 100644 --- a/src/bower/README.md +++ b/src/bower/README.md @@ -3,11 +3,11 @@ Bower offers a generic, unopinionated solution to the problem of front-end package management, while exposing the package dependency model via an API that can be consumed by a more opinionated build stack. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/bower:1": {} + "ghcr.io/devcontainers-extra/features/bower:1": {} } ``` @@ -18,3 +18,7 @@ Bower offers a generic, unopinionated solution to the problem of front-end packa | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/bpipe-sdkman/README.md b/src/bpipe-sdkman/README.md index fd7404387..a7f8e3548 100644 --- a/src/bpipe-sdkman/README.md +++ b/src/bpipe-sdkman/README.md @@ -3,11 +3,11 @@ Bpipe is a framework for running computational pipelines and workflows -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/bpipe-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/bpipe-sdkman:2": {} } ``` @@ -20,3 +20,7 @@ Bpipe is a framework for running computational pipelines and workflows | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/brownie/README.md b/src/brownie/README.md old mode 100755 new mode 100644 index 80ea5aab6..fe0b21895 --- a/src/brownie/README.md +++ b/src/brownie/README.md @@ -3,11 +3,11 @@ Brownie is a Python-based development and testing framework for smart contracts targeting the Ethereum Virtual Machine. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/brownie:2": {} + "ghcr.io/devcontainers-extra/features/brownie:2": {} } ``` @@ -19,3 +19,7 @@ Brownie is a Python-based development and testing framework for smart contracts | pythonVersion | Select the version to install. | string | os-provided | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/browserify/README.md b/src/browserify/README.md index 985ceb1e7..7fb1ae07b 100644 --- a/src/browserify/README.md +++ b/src/browserify/README.md @@ -3,11 +3,11 @@ Browserify lets you require('modules') in the browser by bundling up all of your dependencies. 
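
The brownie feature above accepts a `pythonVersion` option in addition to `version`; a minimal sketch that simply spells out the documented defaults explicitly:

```json
"features": {
    "ghcr.io/devcontainers-extra/features/brownie:2": {
        "version": "latest",
        "pythonVersion": "os-provided"
    }
}
```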
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/browserify:2": {} + "ghcr.io/devcontainers-extra/features/browserify:2": {} } ``` @@ -18,3 +18,7 @@ Browserify lets you require('modules') in the browser by bundling up all of your | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/btop-homebrew/README.md b/src/btop-homebrew/README.md index 1ebc230d4..c1b2dcc1b 100644 --- a/src/btop-homebrew/README.md +++ b/src/btop-homebrew/README.md @@ -3,11 +3,11 @@ btop is a resource monitor that shows usage and stats for processor, memory, disks, network and processes. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/btop-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/btop-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ btop is a resource monitor that shows usage and stats for processor, memory, dis | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/btrace-sdkman/README.md b/src/btrace-sdkman/README.md index d9feded44..cc9cd5af2 100644 --- a/src/btrace-sdkman/README.md +++ b/src/btrace-sdkman/README.md @@ -6,11 +6,11 @@ to dynamically trace a running Java program (similar to DTrace for OpenSolaris applications and OS). BTrace dynamically instruments the classes of the target application to inject bytecode tracing code. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/btrace-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/btrace-sdkman:2": {} } ``` @@ -23,3 +23,7 @@ application to inject bytecode tracing code. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/budibase-cli/README.md b/src/budibase-cli/README.md index 0a983fab6..870c0b821 100644 --- a/src/budibase-cli/README.md +++ b/src/budibase-cli/README.md @@ -3,11 +3,11 @@ The Budibase CLI tool can be used to create a new Budibase installation, to manage an existing installation, make backups or update a Budibase installation to a new version. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/budibase-cli:1": {} + "ghcr.io/devcontainers-extra/features/budibase-cli:1": {} } ``` @@ -18,3 +18,7 @@ The Budibase CLI tool can be used to create a new Budibase installation, to mana | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/buku/README.md b/src/buku/README.md index 4a8e8fccf..216d4111d 100644 --- a/src/buku/README.md +++ b/src/buku/README.md @@ -3,11 +3,11 @@ buku is a powerful bookmark manager and a personal textual mini-web. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/buku:1": {} + "ghcr.io/devcontainers-extra/features/buku:1": {} } ``` @@ -18,3 +18,7 @@ buku is a powerful bookmark manager and a personal textual mini-web. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/caddy/README.md b/src/caddy/README.md index 01a87797a..c62ea1107 100644 --- a/src/caddy/README.md +++ b/src/caddy/README.md @@ -3,11 +3,11 @@ Caddy is a powerful, enterprise-ready, open source web server with automatic HTTPS. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/caddy:1": {} + "ghcr.io/devcontainers-extra/features/caddy:1": {} } ``` @@ -19,3 +19,7 @@ Caddy is a powerful, enterprise-ready, open source web server with automatic HTT | golangVersion | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ccache-asdf/README.md b/src/ccache-asdf/README.md index 15a3785ff..6a17313b9 100644 --- a/src/ccache-asdf/README.md +++ b/src/ccache-asdf/README.md @@ -3,11 +3,11 @@ Ccache is a compiler cache. It speeds up recompilation by caching previous compilations and detecting when the same compilation is being done again. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ccache-asdf:2": {} + "ghcr.io/devcontainers-extra/features/ccache-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Ccache is a compiler cache. It speeds up recompilation by caching previous compi | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/checkov/README.md b/src/checkov/README.md index c6b6275ca..15beb9984 100644 --- a/src/checkov/README.md +++ b/src/checkov/README.md @@ -3,11 +3,11 @@ Checkov is a static code analysis tool for infrastructure as code (IaC) and also a software composition analysis (SCA) tool for images and open source packages. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/checkov:1": {} + "ghcr.io/devcontainers-extra/features/checkov:1": {} } ``` @@ -18,3 +18,7 @@ Checkov is a static code analysis tool for infrastructure as code (IaC) and also | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/chezscheme-asdf/README.md b/src/chezscheme-asdf/README.md index 66fe31ec2..cb03d95cf 100644 --- a/src/chezscheme-asdf/README.md +++ b/src/chezscheme-asdf/README.md @@ -3,11 +3,11 @@ Chez Scheme is both a programming language and an implementation of that language, with supporting tools and documentation. 
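
The caddy feature above exposes both a `version` option and a separate `golangVersion` option (presumably the Go toolchain used when building Caddy). A sketch of setting both explicitly; the values shown are placeholders rather than tested pins:

```json
"features": {
    "ghcr.io/devcontainers-extra/features/caddy:1": {
        "version": "latest",
        "golangVersion": "latest"
    }
}
```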
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/chezscheme-asdf:2": {} + "ghcr.io/devcontainers-extra/features/chezscheme-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Chez Scheme is both a programming language and an implementation of that languag | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/chisel/README.md b/src/chisel/README.md index 154a9a682..27da38d17 100644 --- a/src/chisel/README.md +++ b/src/chisel/README.md @@ -3,11 +3,11 @@ Chisel is a fast TCP/UDP tunnel, transported over HTTP, secured via SSH. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/chisel:1": {} + "ghcr.io/devcontainers-extra/features/chisel:1": {} } ``` @@ -18,3 +18,7 @@ Chisel is a fast TCP/UDP tunnel, transported over HTTP, secured via SSH. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/circleci-cli/README.md b/src/circleci-cli/README.md index d18ae1a52..e0a7935af 100644 --- a/src/circleci-cli/README.md +++ b/src/circleci-cli/README.md @@ -3,11 +3,11 @@ The CircleCI command line interface (CLI) tool enables developers to build, validate, and test their pipeline jobs in their local development environments. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/circleci-cli:1": {} + "ghcr.io/devcontainers-extra/features/circleci-cli:1": {} } ``` @@ -18,3 +18,7 @@ The CircleCI command line interface (CLI) tool enables developers to build, vali | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/clojure-asdf/README.md b/src/clojure-asdf/README.md index 42dd8aed9..1e68bbc50 100644 --- a/src/clojure-asdf/README.md +++ b/src/clojure-asdf/README.md @@ -3,11 +3,11 @@ Clojure is a dialect of Lisp, and shares with Lisp the code-as-data philosophy and a powerful macro system. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/clojure-asdf:2": {} + "ghcr.io/devcontainers-extra/features/clojure-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Clojure is a dialect of Lisp, and shares with Lisp the code-as-data philosophy a | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/cloud-nuke/README.md b/src/cloud-nuke/README.md index f129a8c0f..c36693dc4 100644 --- a/src/cloud-nuke/README.md +++ b/src/cloud-nuke/README.md @@ -3,11 +3,11 @@ cloud-nuke is a tool for cleaning up your cloud accounts by nuking (deleting) all resources within it -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/cloud-nuke:1": {} + "ghcr.io/devcontainers-extra/features/cloud-nuke:1": {} } ``` @@ -18,3 +18,7 @@ cloud-nuke is a tool for cleaning up your cloud accounts by nuking (deleting) al | version | Select the version to install. 
| string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/cloudflare-wrangler/README.md b/src/cloudflare-wrangler/README.md index 149853c05..596bbc8ed 100644 --- a/src/cloudflare-wrangler/README.md +++ b/src/cloudflare-wrangler/README.md @@ -3,11 +3,11 @@ The Workers command-line interface, Wrangler, allows you to create, test, and deploy your Workers projects. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/cloudflare-wrangler:1": {} + "ghcr.io/devcontainers-extra/features/cloudflare-wrangler:1": {} } ``` @@ -18,3 +18,7 @@ The Workers command-line interface, Wrangler, allows you to create, test, and de | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/cloudflared-fips/README.md b/src/cloudflared-fips/README.md index 486fa63f9..2a127d28e 100644 --- a/src/cloudflared-fips/README.md +++ b/src/cloudflared-fips/README.md @@ -3,11 +3,11 @@ Cloudflare Tunnel client is a command-line client for Cloudflare Tunnel, a tunneling daemon that proxies traffic from the Cloudflare network to your origins. This daemon sits between Cloudflare network and your origin (e.g. a webserver). -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/cloudflared-fips:1": {} + "ghcr.io/devcontainers-extra/features/cloudflared-fips:1": {} } ``` @@ -18,3 +18,7 @@ Cloudflare Tunnel client is a command-line client for Cloudflare Tunnel, a tunne | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/cloudflared/README.md b/src/cloudflared/README.md index 48cadc4c3..2aaafc46d 100644 --- a/src/cloudflared/README.md +++ b/src/cloudflared/README.md @@ -3,11 +3,11 @@ Cloudflare Tunnel client is a command-line client for Cloudflare Tunnel, a tunneling daemon that proxies traffic from the Cloudflare network to your origins. This daemon sits between Cloudflare network and your origin (e.g. a webserver). -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/cloudflared:1": {} + "ghcr.io/devcontainers-extra/features/cloudflared:1": {} } ``` @@ -18,3 +18,7 @@ Cloudflare Tunnel client is a command-line client for Cloudflare Tunnel, a tunne | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/cloudinary-cli/README.md b/src/cloudinary-cli/README.md index bf2982335..d399df0c6 100644 --- a/src/cloudinary-cli/README.md +++ b/src/cloudinary-cli/README.md @@ -3,11 +3,11 @@ Cloudinary CLI. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/cloudinary-cli:1": {} + "ghcr.io/devcontainers-extra/features/cloudinary-cli:1": {} } ``` @@ -18,3 +18,7 @@ Cloudinary CLI. | version | Select the version of cloudinary-cli to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/codefresh-cli/README.md b/src/codefresh-cli/README.md index 710172af8..0d5bd2a5a 100644 --- a/src/codefresh-cli/README.md +++ b/src/codefresh-cli/README.md @@ -3,11 +3,11 @@ Codefresh CLI provides a full and flexible interface to interact with Codefresh. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/codefresh-cli:1": {} + "ghcr.io/devcontainers-extra/features/codefresh-cli:1": {} } ``` @@ -18,3 +18,7 @@ Codefresh CLI provides a full and flexible interface to interact with Codefresh. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/composer/README.md b/src/composer/README.md index c730f4d29..1ebe9d6ac 100644 --- a/src/composer/README.md +++ b/src/composer/README.md @@ -3,11 +3,11 @@ Composer is an open source dependency management tool for PHP. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/composer:1": {} + "ghcr.io/devcontainers-extra/features/composer:1": {} } ``` @@ -18,3 +18,7 @@ Composer is an open source dependency management tool for PHP. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/concurnas-sdkman/README.md b/src/concurnas-sdkman/README.md index d46571514..4f2c028cb 100644 --- a/src/concurnas-sdkman/README.md +++ b/src/concurnas-sdkman/README.md @@ -5,11 +5,11 @@ Concurnas is an open source JVM programming language designed for building reliable, scalable, high performance concurrent, distributed and parallel systems. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/concurnas-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/concurnas-sdkman:2": {} } ``` @@ -22,3 +22,7 @@ systems. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/connor-sdkman/README.md b/src/connor-sdkman/README.md index 76b2fd5dd..f826c52a1 100644 --- a/src/connor-sdkman/README.md +++ b/src/connor-sdkman/README.md @@ -4,11 +4,11 @@ ConnOR, short for ConnectOffsetReset, is a commandline tool for resetting Kafka Connect source connector offsets. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/connor-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/connor-sdkman:2": {} } ``` @@ -21,3 +21,7 @@ Connect source connector offsets. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/consul-asdf/README.md b/src/consul-asdf/README.md index 0ab1b62b2..3c53f4ad6 100644 --- a/src/consul-asdf/README.md +++ b/src/consul-asdf/README.md @@ -3,11 +3,11 @@ Consul is a service networking solution to automate network configurations, discover services, and enable secure connectivity across any cloud or runtime. 
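
Several of the sdkman-backed features above (for example concurnas-sdkman and connor-sdkman) share a `jdkDistro` option that defaults to `ms`. A hedged sketch of overriding it; `tem` is assumed here to be a valid SDKMAN distribution code and does not come from this diff:

```json
"features": {
    "ghcr.io/devcontainers-extra/features/connor-sdkman:2": {
        "jdkDistro": "tem"
    }
}
```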
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/consul-asdf:2": {} + "ghcr.io/devcontainers-extra/features/consul-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Consul is a service networking solution to automate network configurations, disc | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/consul-exporter/README.md b/src/consul-exporter/README.md index c4bdb11f1..56aea5492 100644 --- a/src/consul-exporter/README.md +++ b/src/consul-exporter/README.md @@ -3,11 +3,11 @@ Prometheus exporter for Consul metrics. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/consul-exporter:1": {} + "ghcr.io/devcontainers-extra/features/consul-exporter:1": {} } ``` @@ -18,3 +18,7 @@ Prometheus exporter for Consul metrics. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/cookiecutter/README.md b/src/cookiecutter/README.md index 3253dfd03..a9224b42c 100644 --- a/src/cookiecutter/README.md +++ b/src/cookiecutter/README.md @@ -3,11 +3,11 @@ Cookiecutter creates projects from project templates. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/cookiecutter:2": {} + "ghcr.io/devcontainers-extra/features/cookiecutter:2": {} } ``` @@ -18,3 +18,7 @@ Cookiecutter creates projects from project templates. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/copier/README.md b/src/copier/README.md index 65f5c8b20..b7959ffc7 100644 --- a/src/copier/README.md +++ b/src/copier/README.md @@ -3,11 +3,11 @@ copier creates projects from project templates. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/copier:7": {} + "ghcr.io/devcontainers-extra/features/copier:7": {} } ``` @@ -18,3 +18,7 @@ copier creates projects from project templates. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/corepack/README.md b/src/corepack/README.md index 48ecdd0d1..1efeb7590 100644 --- a/src/corepack/README.md +++ b/src/corepack/README.md @@ -3,11 +3,11 @@ Corepack is a zero-runtime-dependency Node.js script that acts as a bridge between Node.js projects and the package managers they are intended to be used with during development. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/corepack:1": {} + "ghcr.io/devcontainers-extra/features/corepack:1": {} } ``` @@ -18,3 +18,7 @@ Corepack is a zero-runtime-dependency Node.js script that acts as a bridge betwe | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/cosign/README.md b/src/cosign/README.md index abfcff150..c528da9ad 100644 --- a/src/cosign/README.md +++ b/src/cosign/README.md @@ -3,11 +3,11 @@ Cosign is a command line utility that can sign and verify software artifact, such as container images and blobs. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/cosign:1": {} + "ghcr.io/devcontainers-extra/features/cosign:1": {} } ``` @@ -18,3 +18,7 @@ Cosign is a command line utility that can sign and verify software artifact, suc | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/coverage-py/README.md b/src/coverage-py/README.md index 9aed0b1ba..46591e8c9 100644 --- a/src/coverage-py/README.md +++ b/src/coverage-py/README.md @@ -3,11 +3,11 @@ Coverage.py is a tool for measuring code coverage of Python programs. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/coverage-py:2": {} + "ghcr.io/devcontainers-extra/features/coverage-py:2": {} } ``` @@ -18,3 +18,7 @@ Coverage.py is a tool for measuring code coverage of Python programs. | version | Select the version of Coverage.py to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/crystal-asdf/README.md b/src/crystal-asdf/README.md index 5055e23b2..19e86f6f9 100644 --- a/src/crystal-asdf/README.md +++ b/src/crystal-asdf/README.md @@ -3,11 +3,11 @@ Crystal is a general-purpose, object-oriented programming language -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/crystal-asdf:2": {} + "ghcr.io/devcontainers-extra/features/crystal-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Crystal is a general-purpose, object-oriented programming language | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/cuba-sdkman/README.md b/src/cuba-sdkman/README.md index 7e41da79c..bcea148a3 100644 --- a/src/cuba-sdkman/README.md +++ b/src/cuba-sdkman/README.md @@ -4,11 +4,11 @@ CUBA CLI is an open source command line utility that enables you to easily create projects based on CUBA Platform -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/cuba-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/cuba-sdkman:2": {} } ``` @@ -21,3 +21,7 @@ create projects based on CUBA Platform | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/curl-apt-get/README.md b/src/curl-apt-get/README.md index 657273a55..a9f4ee07e 100644 --- a/src/curl-apt-get/README.md +++ b/src/curl-apt-get/README.md @@ -3,13 +3,22 @@ cURL is a computer software project providing a library and command-line tool for transferring data using various network protocols. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/curl-apt-get:1": {} + "ghcr.io/devcontainers-extra/features/curl-apt-get:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/curl-homebrew/README.md b/src/curl-homebrew/README.md index 645d93226..25ea45b23 100644 --- a/src/curl-homebrew/README.md +++ b/src/curl-homebrew/README.md @@ -3,11 +3,11 @@ cURL is a computer software project providing a library and command-line tool for transferring data using various network protocols. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/curl-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/curl-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ cURL is a computer software project providing a library and command-line tool fo | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/cve-bin-tool/README.md b/src/cve-bin-tool/README.md index bc72364df..60739d2e6 100644 --- a/src/cve-bin-tool/README.md +++ b/src/cve-bin-tool/README.md @@ -3,11 +3,11 @@ The CVE Binary Tool is a free, open source tool to help you find known vulnerabilities in software, using data from the National Vulnerability Database (NVD) list of Common Vulnerabilities and Exposures (CVEs). -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/cve-bin-tool:1": {} + "ghcr.io/devcontainers-extra/features/cve-bin-tool:1": {} } ``` @@ -18,3 +18,7 @@ The CVE Binary Tool is a free, open source tool to help you find known vulnerabi | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/cxf-sdkman/README.md b/src/cxf-sdkman/README.md index 659b81363..bdfd49b7e 100644 --- a/src/cxf-sdkman/README.md +++ b/src/cxf-sdkman/README.md @@ -6,11 +6,11 @@ services using frontend programming APIs, like JAX-WS and JAX-RS. These services can speak a variety of protocols such as SOAP, XML/HTTP, RESTful HTTP, or CORBA and work over a variety of transports such as HTTP, JMS or JBI. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/cxf-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/cxf-sdkman:2": {} } ``` @@ -23,3 +23,7 @@ and work over a variety of transports such as HTTP, JMS or JBI. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/cyclonedx-cli/README.md b/src/cyclonedx-cli/README.md index fda4b3486..c2a313cf8 100644 --- a/src/cyclonedx-cli/README.md +++ b/src/cyclonedx-cli/README.md @@ -3,11 +3,11 @@ CycloneDX CLI is a utility with many functions to view, convert, and perform operations on SBOMs. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/cyclonedx-cli:1": {} + "ghcr.io/devcontainers-extra/features/cyclonedx-cli:1": {} } ``` @@ -18,3 +18,7 @@ CycloneDX CLI is a utility with many functions to view, convert, and perform ope | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/cyclonedx-python/README.md b/src/cyclonedx-python/README.md index 78f774f16..0cde0e480 100644 --- a/src/cyclonedx-python/README.md +++ b/src/cyclonedx-python/README.md @@ -3,11 +3,11 @@ CycloneDX Python creates CycloneDX Software Bill of Materials (SBOM) from Python projects and environments. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/cyclonedx-python:1": {} + "ghcr.io/devcontainers-extra/features/cyclonedx-python:1": {} } ``` @@ -18,3 +18,7 @@ CycloneDX Python creates CycloneDX Software Bill of Materials (SBOM) from Python | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/cz-cli/README.md b/src/cz-cli/README.md index e7eb5c805..117ffe59c 100644 --- a/src/cz-cli/README.md +++ b/src/cz-cli/README.md @@ -3,11 +3,11 @@ Commitizen is release management tool designed for teams. Commitizen assumes your team uses a standard way of commiting rules and from that foundation, it can bump your project's version, create the changelog, and update files. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/cz-cli:1": {} + "ghcr.io/devcontainers-extra/features/cz-cli:1": {} } ``` @@ -18,3 +18,7 @@ Commitizen is release management tool designed for teams. Commitizen assumes you | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/d/README.md b/src/d/README.md index d285653e5..a2b2fa653 100644 --- a/src/d/README.md +++ b/src/d/README.md @@ -7,7 +7,7 @@ D is a general-purpose programming language with static typing, systems-level ac ```json "features": { - "ghcr.io/devcontainers-contrib/features/d:1": {} + "ghcr.io/devcontainers-extra/features/d:1": {} } ``` @@ -22,4 +22,4 @@ D is a general-purpose programming language with static typing, systems-level ac --- -_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers-contrib/features/blob/main/src/d/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/dasel-asdf/README.md b/src/dasel-asdf/README.md index d0ef2fee2..550a9aff6 100644 --- a/src/dasel-asdf/README.md +++ b/src/dasel-asdf/README.md @@ -3,11 +3,11 @@ Dasel (short for data-selector) allows you to query and modify data structures using selector strings. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/dasel-asdf:2": {} + "ghcr.io/devcontainers-extra/features/dasel-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Dasel (short for data-selector) allows you to query and modify data structures u | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/dashlane-cli/README.md b/src/dashlane-cli/README.md index 9edf13c77..ad528b96e 100644 --- a/src/dashlane-cli/README.md +++ b/src/dashlane-cli/README.md @@ -3,11 +3,11 @@ Dashlane Command Line Interface allows you to get your passwords, otp and secure notes right from your terminal. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/dashlane-cli:1": {} + "ghcr.io/devcontainers-extra/features/dashlane-cli:1": {} } ``` @@ -18,3 +18,7 @@ Dashlane Command Line Interface allows you to get your passwords, otp and secure | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/datadog-ci-cli/README.md b/src/datadog-ci-cli/README.md index d876e49ee..209887539 100644 --- a/src/datadog-ci-cli/README.md +++ b/src/datadog-ci-cli/README.md @@ -3,11 +3,11 @@ Execute commands with Datadog from within your Continuous Integration/Continuous Deployment scripts to perform end-to-end tests of your application before applying your changes or deploying. datadog-ci allows you to run Continuous Testing tests and wait for the results. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/datadog-ci-cli:1": {} + "ghcr.io/devcontainers-extra/features/datadog-ci-cli:1": {} } ``` @@ -18,3 +18,7 @@ Execute commands with Datadog from within your Continuous Integration/Continuous | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/datasette/README.md b/src/datasette/README.md index cba3737b6..c4df437b5 100644 --- a/src/datasette/README.md +++ b/src/datasette/README.md @@ -3,11 +3,11 @@ Datasette is a tool for exploring and publishing data and is aimed at data journalists, museum curators, archivists, local governments, scientists and researchers. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/datasette:2": {} + "ghcr.io/devcontainers-extra/features/datasette:2": {} } ``` @@ -18,3 +18,7 @@ Datasette is a tool for exploring and publishing data and is aimed at data journ | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/dbt-coverage/README.md b/src/dbt-coverage/README.md index bdc7532f1..7144cedb3 100644 --- a/src/dbt-coverage/README.md +++ b/src/dbt-coverage/README.md @@ -3,11 +3,11 @@ dbt-coverage is a one-stop-shop for docs and test coverage of dbt projects. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/dbt-coverage:1": {} + "ghcr.io/devcontainers-extra/features/dbt-coverage:1": {} } ``` @@ -18,3 +18,7 @@ dbt-coverage is a one-stop-shop for docs and test coverage of dbt projects. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ddgr-apt-get/README.md b/src/ddgr-apt-get/README.md index ffe2b110e..d9c9c084f 100644 --- a/src/ddgr-apt-get/README.md +++ b/src/ddgr-apt-get/README.md @@ -3,13 +3,22 @@ ddgr is a cmdline utility to search DuckDuckGo from the terminal. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ddgr-apt-get:1": {} + "ghcr.io/devcontainers-extra/features/ddgr-apt-get:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ddgr-homebrew/README.md b/src/ddgr-homebrew/README.md index 7eb850c58..a8ec95ae0 100644 --- a/src/ddgr-homebrew/README.md +++ b/src/ddgr-homebrew/README.md @@ -3,11 +3,11 @@ ddgr is a cmdline utility to search DuckDuckGo from the terminal. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ddgr-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/ddgr-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ ddgr is a cmdline utility to search DuckDuckGo from the terminal. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/deno-asdf/README.md b/src/deno-asdf/README.md index ad64ed30a..036261fbf 100644 --- a/src/deno-asdf/README.md +++ b/src/deno-asdf/README.md @@ -3,11 +3,11 @@ Deno is a simple, modern runtime for JavaScript and TypeScript that uses V8 and is built in Rust. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/deno-asdf:2": {} + "ghcr.io/devcontainers-extra/features/deno-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Deno is a simple, modern runtime for JavaScript and TypeScript that uses V8 and | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/deno/README.md b/src/deno/README.md index 4e009ca6f..4ec339a16 100644 --- a/src/deno/README.md +++ b/src/deno/README.md @@ -7,7 +7,7 @@ Deno is a simple, modern runtime for JavaScript and TypeScript that uses V8 and ```json "features": { - "ghcr.io/devcontainers-contrib/features/deno:1": {} + "ghcr.io/devcontainers-extra/features/deno:1": {} } ``` @@ -21,4 +21,4 @@ Deno is a simple, modern runtime for JavaScript and TypeScript that uses V8 and --- -_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers-contrib/features/blob/main/src/deno/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/devcontainers-cli/README.md b/src/devcontainers-cli/README.md index d82468b13..d78084910 100644 --- a/src/devcontainers-cli/README.md +++ b/src/devcontainers-cli/README.md @@ -3,11 +3,11 @@ The devcontainers CLI, which configures devcontainers from devcontainer.json. Requires the Docker client and access to a Docker engine socket (e.g. by mounting the host's Docker socket into the container) -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/devcontainers-cli:1": {} + "ghcr.io/devcontainers-extra/features/devcontainers-cli:1": {} } ``` @@ -18,3 +18,7 @@ The devcontainers CLI, which configures devcontainers from devcontainer.json. Re | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/digitalocean-cli/README.md b/src/digitalocean-cli/README.md index 295ead8c9..b5fe5f9bf 100644 --- a/src/digitalocean-cli/README.md +++ b/src/digitalocean-cli/README.md @@ -7,7 +7,7 @@ DigitalOcean CLI allows you to interact with the DigitalOcean API via the comman ```json "features": { - "ghcr.io/devcontainers-contrib/features/digitalocean-cli:1": {} + "ghcr.io/devcontainers-extra/features/digitalocean-cli:1": {} } ``` @@ -21,4 +21,4 @@ DigitalOcean CLI allows you to interact with the DigitalOcean API via the comman --- -_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers-contrib/features/blob/main/src/digitalocean-cli/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/direnv-asdf/README.md b/src/direnv-asdf/README.md index 63f300199..2720ea0e3 100644 --- a/src/direnv-asdf/README.md +++ b/src/direnv-asdf/README.md @@ -3,11 +3,11 @@ direnv is an extension for your shell. It augments existing shells with a new feature that can load and unload environment variables depending on the current directory. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/direnv-asdf:2": {} + "ghcr.io/devcontainers-extra/features/direnv-asdf:2": {} } ``` @@ -18,3 +18,7 @@ direnv is an extension for your shell. It augments existing shells with a new fe | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/direnv/README.md b/src/direnv/README.md index 8654d47eb..2d60d794b 100644 --- a/src/direnv/README.md +++ b/src/direnv/README.md @@ -3,11 +3,11 @@ direnv is an extension for your shell. It augments existing shells with a new feature that can load and unload environment variables depending on the current directory. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/direnv:1": {} + "ghcr.io/devcontainers-extra/features/direnv:1": {} } ``` @@ -18,3 +18,7 @@ direnv is an extension for your shell. It augments existing shells with a new fe | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/dive/README.md b/src/dive/README.md index 612f8512f..405399fa5 100644 --- a/src/dive/README.md +++ b/src/dive/README.md @@ -3,11 +3,11 @@ Dive is a tool for exploring a docker image, layer contents, and discovering ways to shrink the size of your Docker/OCI image. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/dive:1": {} + "ghcr.io/devcontainers-extra/features/dive:1": {} } ``` @@ -18,3 +18,7 @@ Dive is a tool for exploring a docker image, layer contents, and discovering way | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/dnote/README.md b/src/dnote/README.md index a1a75ed77..ee9a4cab7 100644 --- a/src/dnote/README.md +++ b/src/dnote/README.md @@ -3,11 +3,11 @@ Dnote is a simple command line notebook for programmers. It keeps you focused by providing a way of effortlessly capturing and retrieving information without leaving your terminal. It also offers a seamless multi-device sync. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/dnote:1": {} + "ghcr.io/devcontainers-extra/features/dnote:1": {} } ``` @@ -18,3 +18,7 @@ Dnote is a simple command line notebook for programmers. It keeps you focused by | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/doctoolchain-sdkman/README.md b/src/doctoolchain-sdkman/README.md index c6ba303ca..03b5c1d8c 100644 --- a/src/doctoolchain-sdkman/README.md +++ b/src/doctoolchain-sdkman/README.md @@ -6,11 +6,11 @@ architecture plus some additional automation. The basis of docToolchain is the philosophy that software documentation should be treated in the same way as code together with the arc42 template for software architecture. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/doctoolchain-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/doctoolchain-sdkman:2": {} } ``` @@ -23,3 +23,7 @@ together with the arc42 template for software architecture. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/dprint-asdf/README.md b/src/dprint-asdf/README.md index 7128c9176..c612d4ca6 100644 --- a/src/dprint-asdf/README.md +++ b/src/dprint-asdf/README.md @@ -3,11 +3,11 @@ dprint is a pluggable and configurable code formatting platform written in Rust. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/dprint-asdf:2": {} + "ghcr.io/devcontainers-extra/features/dprint-asdf:2": {} } ``` @@ -18,3 +18,7 @@ dprint is a pluggable and configurable code formatting platform written in Rust. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/driftctl/README.md b/src/driftctl/README.md index a8632cf7f..19f4dac00 100644 --- a/src/driftctl/README.md +++ b/src/driftctl/README.md @@ -3,11 +3,11 @@ driftctl detect, track and alert on infrastructure drift. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/driftctl:1": {} + "ghcr.io/devcontainers-extra/features/driftctl:1": {} } ``` @@ -18,3 +18,7 @@ driftctl detect, track and alert on infrastructure drift. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/drone-cli/README.md b/src/drone-cli/README.md index 99371d147..eeeb515b3 100644 --- a/src/drone-cli/README.md +++ b/src/drone-cli/README.md @@ -3,11 +3,11 @@ Drone CLI is a command line client for the Drone continuous integration server. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/drone-cli:1": {} + "ghcr.io/devcontainers-extra/features/drone-cli:1": {} } ``` @@ -18,3 +18,7 @@ Drone CLI is a command line client for the Drone continuous integration server. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/dua/README.md b/src/dua/README.md index da3aeea42..add90ca8e 100644 --- a/src/dua/README.md +++ b/src/dua/README.md @@ -3,11 +3,11 @@ dua (Disk Usage Analyzer) is a tool to conveniently learn about the usage of disk space of a given directory. It's parallel by default and will max out your SSD, providing relevant information as fast as possible. Optionally delete superfluous data, and do so more quickly than rm. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/dua:1": {} + "ghcr.io/devcontainers-extra/features/dua:1": {} } ``` @@ -18,3 +18,7 @@ dua (Disk Usage Analyzer) is a tool to conveniently learn about the usage of dis | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/duf/README.md b/src/duf/README.md index 2e743ce16..52851d712 100644 --- a/src/duf/README.md +++ b/src/duf/README.md @@ -3,11 +3,11 @@ duf is a better 'df' alternative. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/duf:1": {} + "ghcr.io/devcontainers-extra/features/duf:1": {} } ``` @@ -18,3 +18,7 @@ duf is a better 'df' alternative. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/dufs/README.md b/src/dufs/README.md index ff6cb96e0..ca54f3e66 100644 --- a/src/dufs/README.md +++ b/src/dufs/README.md @@ -3,11 +3,11 @@ Dufs is a distinctive utility file server that supports static serving, uploading, searching, accessing control, webdav and more. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/dufs:1": {} + "ghcr.io/devcontainers-extra/features/dufs:1": {} } ``` @@ -18,3 +18,7 @@ Dufs is a distinctive utility file server that supports static serving, uploadin | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/eas-cli/README.md b/src/eas-cli/README.md index a7706370d..a8b4aba0a 100644 --- a/src/eas-cli/README.md +++ b/src/eas-cli/README.md @@ -3,11 +3,11 @@ EAS CLI is the command line app you will use to interact with EAS services from your terminal. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/eas-cli:1": {} + "ghcr.io/devcontainers-extra/features/eas-cli:1": {} } ``` @@ -18,3 +18,7 @@ EAS CLI is the command line app you will use to interact with EAS services from | version | Select the version of EAS CLI to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/eget/README.md b/src/eget/README.md index 52e3bc07b..f831b25d5 100644 --- a/src/eget/README.md +++ b/src/eget/README.md @@ -3,11 +3,11 @@ Eget easily install prebuilt binaries from GitHub. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/eget:1": {} + "ghcr.io/devcontainers-extra/features/eget:1": {} } ``` @@ -18,3 +18,7 @@ Eget easily install prebuilt binaries from GitHub. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/elasticsearch-asdf/README.md b/src/elasticsearch-asdf/README.md index 77e4f109b..4bc2fee5d 100644 --- a/src/elasticsearch-asdf/README.md +++ b/src/elasticsearch-asdf/README.md @@ -3,11 +3,11 @@ Elasticsearch is a search engine based on the Lucene library. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/elasticsearch-asdf:2": {} + "ghcr.io/devcontainers-extra/features/elasticsearch-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Elasticsearch is a search engine based on the Lucene library. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/elm-asdf/README.md b/src/elm-asdf/README.md index 514236c03..44ea04292 100644 --- a/src/elm-asdf/README.md +++ b/src/elm-asdf/README.md @@ -3,11 +3,11 @@ Elm is a domain-specific programming language for declaratively creating web browser-based graphical user interfaces. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/elm-asdf:2": {} + "ghcr.io/devcontainers-extra/features/elm-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Elm is a domain-specific programming language for declaratively creating web bro | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/ember-cli/README.md b/src/ember-cli/README.md index f8d68c3f9..c08823be6 100644 --- a/src/ember-cli/README.md +++ b/src/ember-cli/README.md @@ -3,11 +3,11 @@ The Ember CLI (command line interface) is the official way to create, build, test, and serve the files that make up an Ember app or addon. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ember-cli:1": {} + "ghcr.io/devcontainers-extra/features/ember-cli:1": {} } ``` @@ -18,3 +18,7 @@ The Ember CLI (command line interface) is the official way to create, build, tes | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/envoy/README.md b/src/envoy/README.md index dc77662bf..f7643bcdd 100644 --- a/src/envoy/README.md +++ b/src/envoy/README.md @@ -3,11 +3,11 @@ Envoy is an L7 proxy and communication bus designed for large modern service oriented architectures. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/envoy:1": {} + "ghcr.io/devcontainers-extra/features/envoy:1": {} } ``` @@ -18,3 +18,7 @@ Envoy is an L7 proxy and communication bus designed for large modern service ori | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/epinio/README.md b/src/epinio/README.md index ab81d187b..7fd0f7d60 100644 --- a/src/epinio/README.md +++ b/src/epinio/README.md @@ -3,11 +3,11 @@ Epinio is an opinionated platform that runs on Kubernetes to take you from Code to URL in one step. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/epinio:1": {} + "ghcr.io/devcontainers-extra/features/epinio:1": {} } ``` @@ -18,3 +18,7 @@ Epinio is an opinionated platform that runs on Kubernetes to take you from Code | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/etcd/README.md b/src/etcd/README.md index 671b06c20..aca14fff8 100644 --- a/src/etcd/README.md +++ b/src/etcd/README.md @@ -3,11 +3,11 @@ etcd is a distributed reliable key-value store for the most critical data of a distributed system. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/etcd:1": {} + "ghcr.io/devcontainers-extra/features/etcd:1": {} } ``` @@ -18,3 +18,7 @@ etcd is a distributed reliable key-value store for the most critical data of a d | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/exa/README.md b/src/exa/README.md index 923af0200..051c5e167 100644 --- a/src/exa/README.md +++ b/src/exa/README.md @@ -3,11 +3,11 @@ exa is a modern replacement for ls. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/exa:1": {} + "ghcr.io/devcontainers-extra/features/exa:1": {} } ``` @@ -18,3 +18,7 @@ exa is a modern replacement for ls. 
| version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/exercism-cli/README.md b/src/exercism-cli/README.md index 06f4c98c7..a2939f3f7 100644 --- a/src/exercism-cli/README.md +++ b/src/exercism-cli/README.md @@ -3,11 +3,11 @@ Exercism CLI is the link between the Exercism website and your local work environment. It lets you download exercises and submit your solution to the site. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/exercism-cli:1": {} + "ghcr.io/devcontainers-extra/features/exercism-cli:1": {} } ``` @@ -18,3 +18,7 @@ Exercism CLI is the link between the Exercism website and your local work envir | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/expo-cli/README.md b/src/expo-cli/README.md index a53af2638..106bfffe2 100644 --- a/src/expo-cli/README.md +++ b/src/expo-cli/README.md @@ -3,11 +3,11 @@ The Expo CLI is a command-line tool that is the primary interface between a developer and other Expo tools. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/expo-cli:1": {} + "ghcr.io/devcontainers-extra/features/expo-cli:1": {} } ``` @@ -18,3 +18,7 @@ The Expo CLI is a command-line tool that is the primary interface between a deve | version | Select the version of Expo CLI to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/express-generator/README.md b/src/express-generator/README.md index 6fff5ed8c..2257ff363 100644 --- a/src/express-generator/README.md +++ b/src/express-generator/README.md @@ -3,11 +3,11 @@ Express Application Generator is a handy tool for creating skeleton Express web apps. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/express-generator:2": {} + "ghcr.io/devcontainers-extra/features/express-generator:2": {} } ``` @@ -18,3 +18,7 @@ Express Application Generator is a handy tool for creating skeleton Express web | version | Select the version of Express Application Generator to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/fd/README.md b/src/fd/README.md index ed2070c20..1caee5649 100644 --- a/src/fd/README.md +++ b/src/fd/README.md @@ -3,11 +3,11 @@ fd is a simple, fast and user-friendly alternative to 'find'. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/fd:1": {} + "ghcr.io/devcontainers-extra/features/fd:1": {} } ``` @@ -18,3 +18,7 @@ fd is a simple, fast and user-friendly alternative to 'find'. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/ffmpeg-apt-get/README.md b/src/ffmpeg-apt-get/README.md index 2740fbc55..dda7237df 100644 --- a/src/ffmpeg-apt-get/README.md +++ b/src/ffmpeg-apt-get/README.md @@ -3,13 +3,22 @@ FFmpeg is the leading multimedia framework, able to decode, encode, transcode, mux, demux, stream, filter and play pretty much anything that humans and machines have created. It supports the most obscure ancient formats up to the cutting edge. No matter if they were designed by some standards committee, the community or a corporation. It is also highly portable: FFmpeg compiles, runs, and passes our testing infrastructure FATE across Linux, Mac OS X, Microsoft Windows, the BSDs, Solaris, etc. under a wide variety of build environments, machine architectures, and configurations. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ffmpeg-apt-get:1": {} + "ghcr.io/devcontainers-extra/features/ffmpeg-apt-get:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/firebase-cli/README.md b/src/firebase-cli/README.md index 24e49313f..e2a47b93e 100644 --- a/src/firebase-cli/README.md +++ b/src/firebase-cli/README.md @@ -3,11 +3,11 @@ The Firebase CLI (GitHub) provides a variety of tools for managing, viewing, and deploying to Firebase projects. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/firebase-cli:2": {} + "ghcr.io/devcontainers-extra/features/firebase-cli:2": {} } ``` @@ -18,3 +18,7 @@ The Firebase CLI (GitHub) provides a variety of tools for managing, viewing, and | version | Select the version of Firebase CLI to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/fish-apt-get/README.md b/src/fish-apt-get/README.md index 38e496eae..959d9d29b 100644 --- a/src/fish-apt-get/README.md +++ b/src/fish-apt-get/README.md @@ -3,13 +3,22 @@ fish is a smart and user-friendly command line shell for macOS, Linux, and the rest of the family. fish includes features like syntax highlighting, autosuggest-as-you-type, and fancy tab completions that just work, with no configuration required. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/fish-apt-get:1": {} + "ghcr.io/devcontainers-extra/features/fish-apt-get:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/fkill/README.md b/src/fkill/README.md index 8c438db37..6454b7229 100644 --- a/src/fkill/README.md +++ b/src/fkill/README.md @@ -3,11 +3,11 @@ fkill fabulously kill processes. Cross-platform. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/fkill:2": {} + "ghcr.io/devcontainers-extra/features/fkill:2": {} } ``` @@ -18,3 +18,7 @@ fkill fabulously kill processes. Cross-platform. | version | Select the version to install. 
| string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/flake8/README.md b/src/flake8/README.md index 06b3c9fee..058525308 100644 --- a/src/flake8/README.md +++ b/src/flake8/README.md @@ -3,11 +3,11 @@ Flake8 is a python tool that glues together pycodestyle, pyflakes, mccabe, and third-party plugins. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/flake8:2": {} + "ghcr.io/devcontainers-extra/features/flake8:2": {} } ``` @@ -19,3 +19,7 @@ Flake8 is a python tool that glues together pycodestyle, pyflakes, mccabe, and t | plugins | A space delimitered list of flake8 plugins (will be injected into the flake8 pipx env). see proposals for examples | string | - | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/flink-sdkman/README.md b/src/flink-sdkman/README.md index c536140af..4e880313c 100644 --- a/src/flink-sdkman/README.md +++ b/src/flink-sdkman/README.md @@ -6,11 +6,11 @@ framework.It's a distributed processing engine for stateful computations over unbounded and bounded data streams.It has been designed to run in all common cluster environments, perform computations at in-memory speed and at any scale. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/flink-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/flink-sdkman:2": {} } ``` @@ -23,3 +23,7 @@ cluster environments, perform computations at in-memory speed and at any scale. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/flit/README.md b/src/flit/README.md index f3593dfe4..9f693fec1 100644 --- a/src/flit/README.md +++ b/src/flit/README.md @@ -3,11 +3,11 @@ Flit is a simple way to put Python packages and modules on PyPI. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/flit:2": {} + "ghcr.io/devcontainers-extra/features/flit:2": {} } ``` @@ -18,3 +18,7 @@ Flit is a simple way to put Python packages and modules on PyPI. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/former2-cli/README.md b/src/former2-cli/README.md index 8e64cfbdf..0b651e9a0 100644 --- a/src/former2-cli/README.md +++ b/src/former2-cli/README.md @@ -3,11 +3,11 @@ The Former2 CLI allows you to use the former2.com tool directly from your command line. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/former2-cli:1": {} + "ghcr.io/devcontainers-extra/features/former2-cli:1": {} } ``` @@ -18,3 +18,7 @@ The Former2 CLI allows you to use the former2.com tool directly from your comman | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/fossil-apt-get/README.md b/src/fossil-apt-get/README.md index b737e9e99..96a723e9f 100644 --- a/src/fossil-apt-get/README.md +++ b/src/fossil-apt-get/README.md @@ -3,13 +3,22 @@ Fossil is a simple, high-reliability, distributed software configuration management system. It is capable of performing distributed version control, bug tracking, wiki services, and blogging. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/fossil-apt-get:1": {} + "ghcr.io/devcontainers-extra/features/fossil-apt-get:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/fossil-homebrew/README.md b/src/fossil-homebrew/README.md index 6da3cee33..089dc9a63 100644 --- a/src/fossil-homebrew/README.md +++ b/src/fossil-homebrew/README.md @@ -3,11 +3,11 @@ Fossil is a simple, high-reliability, distributed software configuration management system. It is capable of performing distributed version control, bug tracking, wiki services, and blogging. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/fossil-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/fossil-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ Fossil is a simple, high-reliability, distributed software configuration managem | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/fulcio/README.md b/src/fulcio/README.md index b2cc156df..54fb39817 100644 --- a/src/fulcio/README.md +++ b/src/fulcio/README.md @@ -3,11 +3,11 @@ Fulcio is a free-to-use certificate authority for issuing code signing certificates for an OpenID Connect (OIDC) identity, such as email address. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/fulcio:1": {} + "ghcr.io/devcontainers-extra/features/fulcio:1": {} } ``` @@ -18,3 +18,7 @@ Fulcio is a free-to-use certificate authority for issuing code signing certifica | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/fzf/README.md b/src/fzf/README.md index 9d1afb59f..2c88fd5c9 100644 --- a/src/fzf/README.md +++ b/src/fzf/README.md @@ -3,11 +3,11 @@ fzf is a general-purpose command-line fuzzy finder. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/fzf:1": {} + "ghcr.io/devcontainers-extra/features/fzf:1": {} } ``` @@ -18,3 +18,7 @@ fzf is a general-purpose command-line fuzzy finder. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/gaiden-sdkman/README.md b/src/gaiden-sdkman/README.md index 4311fa237..14ddd3cd2 100644 --- a/src/gaiden-sdkman/README.md +++ b/src/gaiden-sdkman/README.md @@ -3,11 +3,11 @@ Gaiden is a tool that makes it easy to create documentation with Markdown. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/gaiden-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/gaiden-sdkman:2": {} } ``` @@ -20,3 +20,7 @@ Gaiden is a tool that makes it easy to create documentation with Markdown. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ganache/README.md b/src/ganache/README.md index 1be84b4b1..316b2a4a3 100644 --- a/src/ganache/README.md +++ b/src/ganache/README.md @@ -3,11 +3,11 @@ Ganache is an Ethereum simulator that makes developing Ethereum applications faster, easier, and safer. It includes all popular RPC functions and features (like events) and can be run deterministically to make development a breeze. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ganache:1": {} + "ghcr.io/devcontainers-extra/features/ganache:1": {} } ``` @@ -18,3 +18,7 @@ Ganache is an Ethereum simulator that makes developing Ethereum applications fas | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/gdbgui/README.md b/src/gdbgui/README.md index dd55b6c04..2719ed321 100644 --- a/src/gdbgui/README.md +++ b/src/gdbgui/README.md @@ -3,11 +3,11 @@ gdbgui is a browser-based frontend to gdb (gnu debugger). -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/gdbgui:2": {} + "ghcr.io/devcontainers-extra/features/gdbgui:2": {} } ``` @@ -18,3 +18,7 @@ gdbgui is a browser-based frontend to gdb (gnu debugger). | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/gh-cli/README.md b/src/gh-cli/README.md index 55424d628..369760bff 100644 --- a/src/gh-cli/README.md +++ b/src/gh-cli/README.md @@ -3,11 +3,11 @@ GitHub CLI is a command-line interface to GitHub for use in your terminal or your scripts. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/gh-cli:1": {} + "ghcr.io/devcontainers-extra/features/gh-cli:1": {} } ``` @@ -18,3 +18,7 @@ GitHub CLI is a command-line interface to GitHub for use in your terminal or you | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/git-lfs/README.md b/src/git-lfs/README.md index 855e81839..4132805f8 100644 --- a/src/git-lfs/README.md +++ b/src/git-lfs/README.md @@ -3,11 +3,11 @@ Git LFS is a command line extension and specification for managing large files with Git. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/git-lfs:1": {} + "ghcr.io/devcontainers-extra/features/git-lfs:1": {} } ``` @@ -18,3 +18,7 @@ Git LFS is a command line extension and specification for managing large files w | version | Select the version to install. 
| string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/gitmux/README.md b/src/gitmux/README.md index 7c38d7145..bc2da1d3b 100644 --- a/src/gitmux/README.md +++ b/src/gitmux/README.md @@ -3,11 +3,11 @@ Gitmux shows git status in your tmux status bar. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/gitmux:1": {} + "ghcr.io/devcontainers-extra/features/gitmux:1": {} } ``` @@ -18,3 +18,7 @@ Gitmux shows git status in your tmux status bar. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/gitomatic/README.md b/src/gitomatic/README.md index 188a01e01..27f03fae0 100644 --- a/src/gitomatic/README.md +++ b/src/gitomatic/README.md @@ -3,11 +3,11 @@ gitomatic is a tool to monitor git repositories and automatically pull & push changes. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/gitomatic:1": {} + "ghcr.io/devcontainers-extra/features/gitomatic:1": {} } ``` @@ -18,3 +18,7 @@ gitomatic is a tool to monitor git repositories and automatically pull & push ch | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/gitsign-credential-cache/README.md b/src/gitsign-credential-cache/README.md index 5829d1080..f89554b86 100644 --- a/src/gitsign-credential-cache/README.md +++ b/src/gitsign-credential-cache/README.md @@ -3,11 +3,11 @@ gitsign-credential-cache is a optional helper binary that allows users to cache signing credentials. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/gitsign-credential-cache:1": {} + "ghcr.io/devcontainers-extra/features/gitsign-credential-cache:1": {} } ``` @@ -18,3 +18,7 @@ gitsign-credential-cache is a optional helper binary that allows users to cache | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/gitsign/README.md b/src/gitsign/README.md index 0a17645b5..8808e39f2 100644 --- a/src/gitsign/README.md +++ b/src/gitsign/README.md @@ -3,11 +3,11 @@ Gitsign implements keyless Sigstore to sign Git commits with a valid OpenID Connect identity. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/gitsign:1": {} + "ghcr.io/devcontainers-extra/features/gitsign:1": {} } ``` @@ -18,3 +18,7 @@ Gitsign implements keyless Sigstore to sign Git commits with a valid OpenID Conn | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/gitty/README.md b/src/gitty/README.md index 058a8139c..b25484340 100644 --- a/src/gitty/README.md +++ b/src/gitty/README.md @@ -3,11 +3,11 @@ gitty is a smart little CLI helper for git projects, that shows you all the relevant issues, pull requests and changes at a quick glance, right on the command-line. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/gitty:1": {} + "ghcr.io/devcontainers-extra/features/gitty:1": {} } ``` @@ -18,3 +18,7 @@ gitty is a smart little CLI helper for git projects, that shows you all the rele | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/glances/README.md b/src/glances/README.md index 1910cdf52..d6a17d579 100644 --- a/src/glances/README.md +++ b/src/glances/README.md @@ -3,11 +3,11 @@ Glances is a cross-platform monitoring tool which aims to present a large amount of monitoring information through a curses or Web based interface. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/glances:2": {} + "ghcr.io/devcontainers-extra/features/glances:2": {} } ``` @@ -18,3 +18,7 @@ Glances is a cross-platform monitoring tool which aims to present a large amount | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/gleam/README.md b/src/gleam/README.md index 9a30eb3df..edf1b329f 100644 --- a/src/gleam/README.md +++ b/src/gleam/README.md @@ -3,11 +3,11 @@ Gleam is a friendly language for building type-safe, scalable systems. (Gleam compiles to Erlang code, so Erlang needs to be installed to run Gleam code.) -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/gleam:1": {} + "ghcr.io/devcontainers-extra/features/gleam:1": {} } ``` @@ -18,3 +18,7 @@ Gleam is a friendly language for building type-safe, scalable systems. (Gleam co | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/go-task/README.md b/src/go-task/README.md index 328f4bf21..5637d0827 100644 --- a/src/go-task/README.md +++ b/src/go-task/README.md @@ -3,11 +3,11 @@ Task is a task runner / build tool that aims to be simpler and easier to use than, for example, GNU Make. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/go-task:1": {} + "ghcr.io/devcontainers-extra/features/go-task:1": {} } ``` @@ -18,3 +18,7 @@ Task is a task runner / build tool that aims to be simpler and easier to use tha | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/graalvm-asdf/README.md b/src/graalvm-asdf/README.md index a1101ebb8..f04c5ad74 100644 --- a/src/graalvm-asdf/README.md +++ b/src/graalvm-asdf/README.md @@ -3,11 +3,11 @@ GraalVM is a high-performance JDK designed to accelerate Java application performance while consuming fewer resources. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/graalvm-asdf:2": {} + "ghcr.io/devcontainers-extra/features/graalvm-asdf:2": {} } ``` @@ -18,3 +18,7 @@ GraalVM is a high-performance JDK designed to accelerate Java application perfor | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/gradle-sdkman/README.md b/src/gradle-sdkman/README.md index 1083a42dd..6be018f1a 100644 --- a/src/gradle-sdkman/README.md +++ b/src/gradle-sdkman/README.md @@ -7,11 +7,11 @@ instead of the more traditional XML form of declaring the project configuration. Gradle uses a directed acyclic graph (DAG) to determine the order in which tasks can be run. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/gradle-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/gradle-sdkman:2": {} } ``` @@ -24,3 +24,7 @@ can be run. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/gradleprofiler-sdkman/README.md b/src/gradleprofiler-sdkman/README.md index 66042cb8c..eaae28ac0 100644 --- a/src/gradleprofiler-sdkman/README.md +++ b/src/gradleprofiler-sdkman/README.md @@ -3,11 +3,11 @@ A tool for gathering profiling and benchmarking information for Gradle builds -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/gradleprofiler-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/gradleprofiler-sdkman:2": {} } ``` @@ -20,3 +20,7 @@ A tool for gathering profiling and benchmarking information for Gradle builds | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/grails-sdkman/README.md b/src/grails-sdkman/README.md index b820f7232..961e6562c 100644 --- a/src/grails-sdkman/README.md +++ b/src/grails-sdkman/README.md @@ -8,11 +8,11 @@ to be immediately productive whilst providing powerful features, including integrated ORM, Domain-Specific Languages, runtime and compile-time meta-programming and Asynchronous programming. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/grails-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/grails-sdkman:2": {} } ``` @@ -25,3 +25,7 @@ meta-programming and Asynchronous programming. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/graphite-exporter/README.md b/src/graphite-exporter/README.md index 752261917..a6ef7dbc9 100644 --- a/src/graphite-exporter/README.md +++ b/src/graphite-exporter/README.md @@ -3,11 +3,11 @@ An exporter for metrics exported in the Graphite plaintext protocol. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/graphite-exporter:1": {} + "ghcr.io/devcontainers-extra/features/graphite-exporter:1": {} } ``` @@ -18,3 +18,7 @@ An exporter for metrics exported in the Graphite plaintext protocol. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/groovy-sdkman/README.md b/src/groovy-sdkman/README.md index 0439f0050..10d09d2dc 100644 --- a/src/groovy-sdkman/README.md +++ b/src/groovy-sdkman/README.md @@ -9,11 +9,11 @@ application powerful features, including scripting capabilities, Domain-Specific Language authoring, runtime and compile-time meta-programming and functional programming. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/groovy-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/groovy-sdkman:2": {} } ``` @@ -26,3 +26,7 @@ programming. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/groovyserv-sdkman/README.md b/src/groovyserv-sdkman/README.md index cf09fc7df..0882304cc 100644 --- a/src/groovyserv-sdkman/README.md +++ b/src/groovyserv-sdkman/README.md @@ -5,11 +5,11 @@ GroovyServ reduces startup time of the JVM for runnning Groovy significantly. It depends on your environments, but in most cases, it’s 10 to 20 times faster than regular Groovy. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/groovyserv-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/groovyserv-sdkman:2": {} } ``` @@ -22,3 +22,7 @@ regular Groovy. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/grpcurl-asdf/README.md b/src/grpcurl-asdf/README.md index 5e45f3419..04683896b 100644 --- a/src/grpcurl-asdf/README.md +++ b/src/grpcurl-asdf/README.md @@ -3,11 +3,11 @@ gRPCurl is a command-line tool that lets you interact with gRPC servers. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/grpcurl-asdf:2": {} + "ghcr.io/devcontainers-extra/features/grpcurl-asdf:2": {} } ``` @@ -18,3 +18,7 @@ gRPCurl is a command-line tool that lets you interact with gRPC servers. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/grype/README.md b/src/grype/README.md index bcf0d95ab..db955832a 100644 --- a/src/grype/README.md +++ b/src/grype/README.md @@ -3,11 +3,11 @@ Grype is a vulnerability scanner for container images and filesystems. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/grype:1": {} + "ghcr.io/devcontainers-extra/features/grype:1": {} } ``` @@ -18,3 +18,7 @@ Grype is a vulnerability scanner for container images and filesystems. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/gulp-cli/README.md b/src/gulp-cli/README.md index 231273a80..35312fc45 100644 --- a/src/gulp-cli/README.md +++ b/src/gulp-cli/README.md @@ -3,11 +3,11 @@ Gulp CLI is Command Line Utility for Gulp -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/gulp-cli:2": {} + "ghcr.io/devcontainers-extra/features/gulp-cli:2": {} } ``` @@ -18,3 +18,7 @@ Gulp CLI is Command Line Utility for Gulp | version | Select the version of Gulp CLI to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/hadoop-sdkman/README.md b/src/hadoop-sdkman/README.md index 3de9f5702..3828516c9 100644 --- a/src/hadoop-sdkman/README.md +++ b/src/hadoop-sdkman/README.md @@ -7,11 +7,11 @@ processing of large data sets across clusters of computersusing simple programming models.It is designed to scale up from single servers to thousands of machines, each offering local computation and storage. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/hadoop-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/hadoop-sdkman:2": {} } ``` @@ -24,3 +24,7 @@ of machines, each offering local computation and storage. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/haskell/README.md b/src/haskell/README.md index f4bd48054..aafcd0d09 100644 --- a/src/haskell/README.md +++ b/src/haskell/README.md @@ -7,7 +7,7 @@ Installs Haskell. An advanced, purely functional programming language ```json "features": { - "ghcr.io/devcontainers-contrib/features/haskell:2": {} + "ghcr.io/devcontainers-extra/features/haskell:2": {} } ``` @@ -28,4 +28,4 @@ Installs Haskell. An advanced, purely functional programming language --- -_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers-contrib/features/blob/main/src/haskell/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/hatch/README.md b/src/hatch/README.md index 6f97dd53f..6d5af2ca7 100644 --- a/src/hatch/README.md +++ b/src/hatch/README.md @@ -3,11 +3,11 @@ Hatch is a modern, extensible Python project manager. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/hatch:2": {} + "ghcr.io/devcontainers-extra/features/hatch:2": {} } ``` @@ -18,3 +18,7 @@ Hatch is a modern, extensible Python project manager. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/haxe-asdf/README.md b/src/haxe-asdf/README.md index e98728ff1..d134c5f37 100644 --- a/src/haxe-asdf/README.md +++ b/src/haxe-asdf/README.md @@ -3,11 +3,11 @@ Haxe is an open source high-level cross-platform programming language and compiler. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/haxe-asdf:2": {} + "ghcr.io/devcontainers-extra/features/haxe-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Haxe is an open source high-level cross-platform programming language and compil | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/heroku-cli/README.md b/src/heroku-cli/README.md index 1463bd058..5de88a370 100644 --- a/src/heroku-cli/README.md +++ b/src/heroku-cli/README.md @@ -7,7 +7,7 @@ Heroku CLI allows you to interact with the Heroku API via the command line ```json "features": { - "ghcr.io/devcontainers-contrib/features/heroku-cli:1": {} + "ghcr.io/devcontainers-extra/features/heroku-cli:1": {} } ``` @@ -21,4 +21,4 @@ Heroku CLI allows you to interact with the Heroku API via the command line --- -_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers-contrib/features/blob/main/src/heroku-cli/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/hotel/README.md b/src/hotel/README.md index 7af0d2c30..ba2e5ebf9 100644 --- a/src/hotel/README.md +++ b/src/hotel/README.md @@ -3,11 +3,11 @@ hotel is a simple process manager for developers. Start apps from your browser and access them using local domains. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/hotel:1": {} + "ghcr.io/devcontainers-extra/features/hotel:1": {} } ``` @@ -18,3 +18,7 @@ hotel is a simple process manager for developers. Start apps from your browser a | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/how2/README.md b/src/how2/README.md index 3543a23b3..c09cb6f2d 100644 --- a/src/how2/README.md +++ b/src/how2/README.md @@ -3,11 +3,11 @@ how2 finds the simplest way to do something in a unix shell. It's like man, but you can query it in natural language. It uses a mix of AI code-completion and StackOverflow search. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/how2:1": {} + "ghcr.io/devcontainers-extra/features/how2:1": {} } ``` @@ -18,3 +18,7 @@ how2 finds the simplest way to do something in a unix shell. It's like man, but | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/http-server/README.md b/src/http-server/README.md index 29b802467..4e86ce1cf 100644 --- a/src/http-server/README.md +++ b/src/http-server/README.md @@ -3,11 +3,11 @@ http-server is a simple, zero-configuration command-line static HTTP server. 
It is powerful enough for production usage, but it's simple and hackable enough to be used for testing, local development and learning. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/http-server:1": {} + "ghcr.io/devcontainers-extra/features/http-server:1": {} } ``` @@ -18,3 +18,7 @@ http-server is a simple, zero-configuration command-line static HTTP server. It | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/http4k-sdkman/README.md b/src/http4k-sdkman/README.md index 55cd6b650..dbcae6c87 100644 --- a/src/http4k-sdkman/README.md +++ b/src/http4k-sdkman/README.md @@ -3,11 +3,11 @@ http4k is the Functional toolkit for building HTTP applications in Kotlin -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/http4k-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/http4k-sdkman:2": {} } ``` @@ -20,3 +20,7 @@ http4k is the Functional toolkit for building HTTP applications in Kotlin | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/hyperfine/README.md b/src/hyperfine/README.md index e507eb287..c65b6d23f 100644 --- a/src/hyperfine/README.md +++ b/src/hyperfine/README.md @@ -3,11 +3,11 @@ hyperfine is a command-line benchmarking tool. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/hyperfine:1": {} + "ghcr.io/devcontainers-extra/features/hyperfine:1": {} } ``` @@ -18,3 +18,7 @@ hyperfine is a command-line benchmarking tool. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/immuadmin-fips/README.md b/src/immuadmin-fips/README.md index c832c239b..1dd7dc9d0 100644 --- a/src/immuadmin-fips/README.md +++ b/src/immuadmin-fips/README.md @@ -3,11 +3,11 @@ immuadmin is the admin CLI for immudb and immugw. You can install and manage the service installation for both components and get statistics as well as runtime information. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/immuadmin-fips:1": {} + "ghcr.io/devcontainers-extra/features/immuadmin-fips:1": {} } ``` @@ -18,3 +18,7 @@ immuadmin is the admin CLI for immudb and immugw. You can install and manage the | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/immuadmin/README.md b/src/immuadmin/README.md index de3b26a49..f0477db59 100644 --- a/src/immuadmin/README.md +++ b/src/immuadmin/README.md @@ -3,11 +3,11 @@ immuadmin is the admin CLI for immudb and immugw. You can install and manage the service installation for both components and get statistics as well as runtime information. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/immuadmin:1": {} + "ghcr.io/devcontainers-extra/features/immuadmin:1": {} } ``` @@ -18,3 +18,7 @@ immuadmin is the admin CLI for immudb and immugw. You can install and manage the | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/immuclient-fips/README.md b/src/immuclient-fips/README.md index 363a264a5..1f9bf102e 100644 --- a/src/immuclient-fips/README.md +++ b/src/immuclient-fips/README.md @@ -3,11 +3,11 @@ immuclient is a CLI client for immudb - the lightweight, high-speed immutable database for systems and applications. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/immuclient-fips:1": {} + "ghcr.io/devcontainers-extra/features/immuclient-fips:1": {} } ``` @@ -18,3 +18,7 @@ immuclient is a CLI client for immudb - the lightweight, high-speed immutable da | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/immuclient/README.md b/src/immuclient/README.md index 04ceba845..a74ba7232 100644 --- a/src/immuclient/README.md +++ b/src/immuclient/README.md @@ -3,11 +3,11 @@ immuclient is a CLI client for immudb - the lightweight, high-speed immutable database for systems and applications. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/immuclient:1": {} + "ghcr.io/devcontainers-extra/features/immuclient:1": {} } ``` @@ -18,3 +18,7 @@ immuclient is a CLI client for immudb - the lightweight, high-speed immutable da | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/immudb-fips/README.md b/src/immudb-fips/README.md index 6d9ce0220..81351d271 100644 --- a/src/immudb-fips/README.md +++ b/src/immudb-fips/README.md @@ -3,11 +3,11 @@ immudb is an immutable database based on zero trust, SQL and Key-Value, tamperproof, data change history. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/immudb-fips:1": {} + "ghcr.io/devcontainers-extra/features/immudb-fips:1": {} } ``` @@ -18,3 +18,7 @@ immudb is an immutable database based on zero trust, SQL and Key-Value, tamperpr | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/immudb/README.md b/src/immudb/README.md index 6ef29451f..4c50f94b7 100644 --- a/src/immudb/README.md +++ b/src/immudb/README.md @@ -3,11 +3,11 @@ immudb is an immutable database based on zero trust, SQL and Key-Value, tamperproof, data change history. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/immudb:1": {} + "ghcr.io/devcontainers-extra/features/immudb:1": {} } ``` @@ -18,3 +18,7 @@ immudb is an immutable database based on zero trust, SQL and Key-Value, tamperpr | version | Select the version to install. 
| string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/infracost/README.md b/src/infracost/README.md index 24864ca11..7f5334060 100644 --- a/src/infracost/README.md +++ b/src/infracost/README.md @@ -3,11 +3,11 @@ Infracost shows cloud cost estimates for Terraform. It lets engineers see a cost breakdown and understand costs before making changes, either in the terminal, VS Code or pull requests. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/infracost:1": {} + "ghcr.io/devcontainers-extra/features/infracost:1": {} } ``` @@ -18,3 +18,7 @@ Infracost shows cloud cost estimates for Terraform. It lets engineers see a cost | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/infrastructor-sdkman/README.md b/src/infrastructor-sdkman/README.md index e224a7dcc..0857ecc3b 100644 --- a/src/infrastructor-sdkman/README.md +++ b/src/infrastructor-sdkman/README.md @@ -3,11 +3,11 @@ Infrastructor is an open source server provisioning tool written in Groovy -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/infrastructor-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/infrastructor-sdkman:2": {} } ``` @@ -20,3 +20,7 @@ Infrastructor is an open source server provisioning tool written in Groovy | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/invoke/README.md b/src/invoke/README.md index ecc2306ad..2e5c30973 100644 --- a/src/invoke/README.md +++ b/src/invoke/README.md @@ -3,11 +3,11 @@ Invoke is a Python (2.7 and 3.4+) library for managing shell-oriented subprocesses and organizing executable Python code into CLI-invokable tasks. It draws inspiration from various sources (make/rake, Fabric 1.x, etc) to arrive at a powerful & clean feature set. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/invoke:1": {} + "ghcr.io/devcontainers-extra/features/invoke:1": {} } ``` @@ -18,3 +18,7 @@ Invoke is a Python (2.7 and 3.4+) library for managing shell-oriented subprocess | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ionic-cli/README.md b/src/ionic-cli/README.md index e1bdfe634..b26bcc113 100644 --- a/src/ionic-cli/README.md +++ b/src/ionic-cli/README.md @@ -3,11 +3,11 @@ The Ionic command-line interface (CLI) is the go-to tool for developing Ionic apps. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ionic-cli:2": {} + "ghcr.io/devcontainers-extra/features/ionic-cli:2": {} } ``` @@ -18,3 +18,7 @@ The Ionic command-line interface (CLI) is the go-to tool for developing Ionic ap | version | Select the version of Ionic CLI to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/isort/README.md b/src/isort/README.md index ae68947c1..40781040e 100644 --- a/src/isort/README.md +++ b/src/isort/README.md @@ -3,11 +3,11 @@ isort is a Python utility / library to sort imports alphabetically, and automatically separated into sections and by type. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/isort:2": {} + "ghcr.io/devcontainers-extra/features/isort:2": {} } ``` @@ -18,3 +18,7 @@ isort is a Python utility / library to sort imports alphabetically, and automati | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/istioctl/README.md b/src/istioctl/README.md index 9d1108d24..4220aae54 100644 --- a/src/istioctl/README.md +++ b/src/istioctl/README.md @@ -3,11 +3,11 @@ The istioctl tool is a configuration command line utility that allows service operators to debug and diagnose their Istio service mesh deployments. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/istioctl:1": {} + "ghcr.io/devcontainers-extra/features/istioctl:1": {} } ``` @@ -18,3 +18,7 @@ The istioctl tool is a configuration command line utility that allows service op | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jake/README.md b/src/jake/README.md index 042cc13a1..0d63dd42c 100644 --- a/src/jake/README.md +++ b/src/jake/README.md @@ -3,11 +3,11 @@ jake is a tool to check your Python environments for vulnerable Open Source packages with OSS Index or Sonatype Nexus Lifecycle. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jake:1": {} + "ghcr.io/devcontainers-extra/features/jake:1": {} } ``` @@ -18,3 +18,7 @@ jake is a tool to check your Python environments for vulnerable Open Source pack | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jbake-sdkman/README.md b/src/jbake-sdkman/README.md index 3221f69bb..8546346c2 100644 --- a/src/jbake-sdkman/README.md +++ b/src/jbake-sdkman/README.md @@ -4,11 +4,11 @@ JBake is a Java based, open source, static site/blog generator for developers and designers. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jbake-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/jbake-sdkman:2": {} } ``` @@ -21,3 +21,7 @@ and designers. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jbang-sdkman/README.md b/src/jbang-sdkman/README.md index db14aafd1..36af31655 100644 --- a/src/jbang-sdkman/README.md +++ b/src/jbang-sdkman/README.md @@ -4,11 +4,11 @@ JBang makes it easy to use Java for scripting. It lets you use a single file for code and dependency management and allows you to run it directly. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jbang-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/jbang-sdkman:2": {} } ``` @@ -21,3 +21,7 @@ code and dependency management and allows you to run it directly. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jenkinsx-cli/README.md b/src/jenkinsx-cli/README.md index f0bcf5d9e..9d64649f2 100644 --- a/src/jenkinsx-cli/README.md +++ b/src/jenkinsx-cli/README.md @@ -3,11 +3,11 @@ Jenkins X CLI is the modular command line CLI for Jenkins X 3.x -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jenkinsx-cli:1": {} + "ghcr.io/devcontainers-extra/features/jenkinsx-cli:1": {} } ``` @@ -18,3 +18,7 @@ Jenkins X CLI is the modular command line CLI for Jenkins X 3.x | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jest/README.md b/src/jest/README.md index 6eaba1eef..c4fa33d51 100644 --- a/src/jest/README.md +++ b/src/jest/README.md @@ -3,11 +3,11 @@ Jest is a delightful JavaScript Testing Framework with a focus on simplicity -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jest:2": {} + "ghcr.io/devcontainers-extra/features/jest:2": {} } ``` @@ -18,3 +18,7 @@ Jest is a delightful JavaScript Testing Framework with a focus on simplicity | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jfrog-cli-homebrew/README.md b/src/jfrog-cli-homebrew/README.md index deabf1dd8..a499d8b34 100644 --- a/src/jfrog-cli-homebrew/README.md +++ b/src/jfrog-cli-homebrew/README.md @@ -3,11 +3,11 @@ JFrog CLI is a compact and smart client that provides a simple interface that automates access to JFrog products simplifying your automation scripts making them more readable and easier to maintain. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jfrog-cli-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/jfrog-cli-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ JFrog CLI is a compact and smart client that provides a simple interface that au | version | Select the version of JFrog CLI to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jfrog-cli-npm/README.md b/src/jfrog-cli-npm/README.md index 2ca1f7fff..0eea8653f 100644 --- a/src/jfrog-cli-npm/README.md +++ b/src/jfrog-cli-npm/README.md @@ -3,11 +3,11 @@ JFrog CLI is a compact and smart client that provides a simple interface that automates access to JFrog products simplifying your automation scripts making them more readable and easier to maintain. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jfrog-cli-npm:1": {} + "ghcr.io/devcontainers-extra/features/jfrog-cli-npm:1": {} } ``` @@ -18,3 +18,7 @@ JFrog CLI is a compact and smart client that provides a simple interface that au | version | Select the version of JFrog CLI to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jfrog-cli/README.md b/src/jfrog-cli/README.md index 5aea74cbd..30dbf4edb 100644 --- a/src/jfrog-cli/README.md +++ b/src/jfrog-cli/README.md @@ -3,13 +3,22 @@ JFrog CLI is a compact and smart client that provides a simple interface that automates access to JFrog products simplifying your automation scripts making them more readable and easier to maintain. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jfrog-cli:1": {} + "ghcr.io/devcontainers-extra/features/jfrog-cli:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jira-cli/README.md b/src/jira-cli/README.md index 9479d8885..5d534513b 100644 --- a/src/jira-cli/README.md +++ b/src/jira-cli/README.md @@ -3,11 +3,11 @@ Jira CLI is an interactive command line tool for Atlassian Jira that will help you avoid Jira UI to some extent. This tool may not be able to do everything, but it has all the essential features required to improve your day-to-day workflow with Jira. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jira-cli:1": {} + "ghcr.io/devcontainers-extra/features/jira-cli:1": {} } ``` @@ -18,3 +18,7 @@ Jira CLI is an interactive command line tool for Atlassian Jira that will help y | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jmc-sdkman/README.md b/src/jmc-sdkman/README.md index 2fcc72106..7de48edbd 100644 --- a/src/jmc-sdkman/README.md +++ b/src/jmc-sdkman/README.md @@ -9,11 +9,11 @@ data collected by Java Flight Recorder. The tool chain enables developers and administrators to collect and analyze data from Java applications running locally or deployed in production environments. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jmc-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/jmc-sdkman:2": {} } ``` @@ -26,3 +26,7 @@ locally or deployed in production environments. | jdkDistro | Select or enter a JDK distribution to install | string | oracle | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jmeter-sdkman/README.md b/src/jmeter-sdkman/README.md index dd3bd4e88..98f7896b0 100644 --- a/src/jmeter-sdkman/README.md +++ b/src/jmeter-sdkman/README.md @@ -6,11 +6,11 @@ application designed to load test functional behavior and measure performance. It was originally designed for testing Web Applications but has since expanded to other test functions. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jmeter-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/jmeter-sdkman:2": {} } ``` @@ -23,3 +23,7 @@ to other test functions. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/joern-sdkman/README.md b/src/joern-sdkman/README.md index 3f8b9f5c6..1e15b08ca 100644 --- a/src/joern-sdkman/README.md +++ b/src/joern-sdkman/README.md @@ -9,11 +9,11 @@ Scala-based domain-specific query language. Joern is developed with the goal of providing a useful tool for vulnerability discovery and research in static program analysis. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/joern-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/joern-sdkman:2": {} } ``` @@ -26,3 +26,7 @@ program analysis. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jreleaser-sdkman/README.md b/src/jreleaser-sdkman/README.md index f1c19262f..963073ffd 100644 --- a/src/jreleaser-sdkman/README.md +++ b/src/jreleaser-sdkman/README.md @@ -5,11 +5,11 @@ JReleaser is a release automation tool for Java projects. Its goal is to simplify creating releases and publishing artifacts to multiple package managers while providing customizable options. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jreleaser-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/jreleaser-sdkman:2": {} } ``` @@ -22,3 +22,7 @@ while providing customizable options. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jrnl/README.md b/src/jrnl/README.md index b9024b59f..0bdec24b1 100644 --- a/src/jrnl/README.md +++ b/src/jrnl/README.md @@ -3,11 +3,11 @@ jrnl is a simple journal application for the command line. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jrnl:2": {} + "ghcr.io/devcontainers-extra/features/jrnl:2": {} } ``` @@ -18,3 +18,7 @@ jrnl is a simple journal application for the command line. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jshint/README.md b/src/jshint/README.md index 113e0c6b7..5236c99c8 100644 --- a/src/jshint/README.md +++ b/src/jshint/README.md @@ -3,11 +3,11 @@ JSHint is a tool that helps to detect errors and potential problems in your JavaScript code. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jshint:2": {} + "ghcr.io/devcontainers-extra/features/jshint:2": {} } ``` @@ -18,3 +18,7 @@ JSHint is a tool that helps to detect errors and potential problems in your Java | version | Select the version to install. 
| string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jsii-diff/README.md b/src/jsii-diff/README.md index cada3a7f2..535a4bb87 100644 --- a/src/jsii-diff/README.md +++ b/src/jsii-diff/README.md @@ -3,11 +3,11 @@ jsii-diff compares two jsii assemblies for compatibility. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jsii-diff:1": {} + "ghcr.io/devcontainers-extra/features/jsii-diff:1": {} } ``` @@ -18,3 +18,7 @@ jsii-diff compares two jsii assemblies for compatibility. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jsii-pacmak/README.md b/src/jsii-pacmak/README.md index 12342b4ba..6dda500cc 100644 --- a/src/jsii-pacmak/README.md +++ b/src/jsii-pacmak/README.md @@ -3,11 +3,11 @@ jsii-pacmak generates ready-to-publish language-specific packages for jsii modules, part of the jsii project. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jsii-pacmak:1": {} + "ghcr.io/devcontainers-extra/features/jsii-pacmak:1": {} } ``` @@ -18,3 +18,7 @@ jsii-pacmak generates ready-to-publish language-specific packages for jsii modul | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jsii-rosetta/README.md b/src/jsii-rosetta/README.md index 1cf53990d..e483eabbe 100644 --- a/src/jsii-rosetta/README.md +++ b/src/jsii-rosetta/README.md @@ -3,11 +3,11 @@ jsii-rosetta is a utility to transcribe example code snippets from TypeScript to other jsii languages. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jsii-rosetta:1": {} + "ghcr.io/devcontainers-extra/features/jsii-rosetta:1": {} } ``` @@ -18,3 +18,7 @@ jsii-rosetta is a utility to transcribe example code snippets from TypeScript to | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/jsii/README.md b/src/jsii/README.md index 0d8b15195..1f3907aa0 100644 --- a/src/jsii/README.md +++ b/src/jsii/README.md @@ -3,11 +3,11 @@ jsii allows code in any language to naturally interact with JavaScript classes. It is the technology that enables the AWS Cloud Development Kit to deliver polyglot libraries from a single codebase! -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/jsii:1": {} + "ghcr.io/devcontainers-extra/features/jsii:1": {} } ``` @@ -18,3 +18,7 @@ jsii allows code in any language to naturally interact with JavaScript classes. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/json-server/README.md b/src/json-server/README.md index 707ffe81d..dba35a017 100644 --- a/src/json-server/README.md +++ b/src/json-server/README.md @@ -3,11 +3,11 @@ Get a full fake REST API with zero coding in less than 30 seconds. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/json-server:1": {} + "ghcr.io/devcontainers-extra/features/json-server:1": {} } ``` @@ -18,3 +18,7 @@ Get a full fake REST API with zero coding in less than 30 seconds. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/k2tf/README.md b/src/k2tf/README.md index c98b53330..318558803 100644 --- a/src/k2tf/README.md +++ b/src/k2tf/README.md @@ -3,11 +3,11 @@ k2tf is a tool for converting Kubernetes API Objects (in YAML format) into HashiCorp's Terraform configuration language. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/k2tf:1": {} + "ghcr.io/devcontainers-extra/features/k2tf:1": {} } ``` @@ -18,3 +18,7 @@ k2tf is a tool for converting Kubernetes API Objects (in YAML format) into Hashi | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/k6/README.md b/src/k6/README.md index eeb24e326..519b38447 100644 --- a/src/k6/README.md +++ b/src/k6/README.md @@ -3,11 +3,11 @@ k6 is an open-source load testing tool that makes performance testing easy and productive for engineering teams. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/k6:1": {} + "ghcr.io/devcontainers-extra/features/k6:1": {} } ``` @@ -18,3 +18,7 @@ k6 is an open-source load testing tool that makes performance testing easy and p | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/karaf-sdkman/README.md b/src/karaf-sdkman/README.md index cb6be6371..a45cd742d 100644 --- a/src/karaf-sdkman/README.md +++ b/src/karaf-sdkman/README.md @@ -7,11 +7,11 @@ you need for your applications. It runs on premise or on cloud. By polymorphic, it means that Karaf can host any kind of applications: WAR, OSGi, Spring, and much more. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/karaf-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/karaf-sdkman:2": {} } ``` @@ -24,3 +24,7 @@ much more. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/keepercommander/README.md b/src/keepercommander/README.md index 6031e9162..f2ad217c8 100644 --- a/src/keepercommander/README.md +++ b/src/keepercommander/README.md @@ -3,11 +3,11 @@ Keeper Commander is a command-line and SDK interface to Keeper® Password Manager. 
Commander can be used to access and control your Keeper vault, perform administrative functions (such as end-user onboarding and data import/export), launch remote sessions, rotate passwords, eliminate hardcoded passwords and more. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/keepercommander:1": {} + "ghcr.io/devcontainers-extra/features/keepercommander:1": {} } ``` @@ -18,3 +18,7 @@ Keeper Commander is a command-line and SDK interface to Keeper® Password Manage | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ki-sdkman/README.md b/src/ki-sdkman/README.md index 1c15d0bf5..2f8144ad0 100644 --- a/src/ki-sdkman/README.md +++ b/src/ki-sdkman/README.md @@ -5,11 +5,11 @@ An extensible implementation of the Kotlin REPL with a rich set of features including autocomplete, syntax highlighting, type inference and maven dependencies. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ki-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/ki-sdkman:2": {} } ``` @@ -22,3 +22,7 @@ dependencies. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/kind/README.md b/src/kind/README.md index cb67e7f38..108f19a6e 100644 --- a/src/kind/README.md +++ b/src/kind/README.md @@ -3,11 +3,11 @@ kind is a tool for running local Kubernetes clusters using Docker container 'nodes'. kind was primarily designed for testing Kubernetes itself, but may be used for local development or CI. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/kind:1": {} + "ghcr.io/devcontainers-extra/features/kind:1": {} } ``` @@ -18,3 +18,7 @@ kind is a tool for running local Kubernetes clusters using Docker container 'nod | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/kobweb-sdkman/README.md b/src/kobweb-sdkman/README.md index ce8bd6b51..3fb1c1cf7 100644 --- a/src/kobweb-sdkman/README.md +++ b/src/kobweb-sdkman/README.md @@ -4,11 +4,11 @@ Kobweb is an opinionated Kotlin web framework built on top of Compose for Web. The CLI provides commands to help setup and manage your project. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/kobweb-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/kobweb-sdkman:2": {} } ``` @@ -21,3 +21,7 @@ The CLI provides commands to help setup and manage your project. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/kops/README.md b/src/kops/README.md index 26974e639..ff9989b53 100644 --- a/src/kops/README.md +++ b/src/kops/README.md @@ -3,11 +3,11 @@ Kubernetes Operations (kOps) - Production Grade k8s Installation, Upgrades and Management. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/kops:1": {} + "ghcr.io/devcontainers-extra/features/kops:1": {} } ``` @@ -18,3 +18,7 @@ Kubernetes Operations (kOps) - Production Grade k8s Installation, Upgrades and M | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/kotlin-sdkman/README.md b/src/kotlin-sdkman/README.md index 754506c51..0bde8487d 100644 --- a/src/kotlin-sdkman/README.md +++ b/src/kotlin-sdkman/README.md @@ -4,11 +4,11 @@ Kotlin is a statically-typed programming language that runs on the Java Virtual Machine and can also be compiled to JavaScript source code. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/kotlin-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/kotlin-sdkman:2": {} } ``` @@ -21,3 +21,7 @@ Machine and can also be compiled to JavaScript source code. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/kscript-sdkman/README.md b/src/kscript-sdkman/README.md index b28be318f..bcfb0579f 100644 --- a/src/kscript-sdkman/README.md +++ b/src/kscript-sdkman/README.md @@ -5,11 +5,11 @@ Enhanced scripting support for Kotlin on *nix-based systems. kscript provides an easy-to-use, very flexible, and almost zero-overhead solution to write self-contained mini-applications with Kotlin. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/kscript-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/kscript-sdkman:2": {} } ``` @@ -23,3 +23,7 @@ self-contained mini-applications with Kotlin. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/kubeclarity-cli/README.md b/src/kubeclarity-cli/README.md index 8b050121f..8c28cc8e9 100644 --- a/src/kubeclarity-cli/README.md +++ b/src/kubeclarity-cli/README.md @@ -3,11 +3,11 @@ KubeClarity is a tool for detection and management of Software Bill Of Materials (SBOM) and vulnerabilities of container images and filesystems. KubeClarity CLI can be run locally and especially useful for CI/CD pipelines. It allows to analyze images and directories to generate SBOM, and scan it for vulnerabilities. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/kubeclarity-cli:1": {} + "ghcr.io/devcontainers-extra/features/kubeclarity-cli:1": {} } ``` @@ -18,3 +18,7 @@ KubeClarity is a tool for detection and management of Software Bill Of Materials | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/kubectl-asdf/README.md b/src/kubectl-asdf/README.md index e31f5668a..c5d6244e2 100644 --- a/src/kubectl-asdf/README.md +++ b/src/kubectl-asdf/README.md @@ -3,11 +3,11 @@ Installs Kubectl -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/kubectl-asdf:2": {} + "ghcr.io/devcontainers-extra/features/kubectl-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Installs Kubectl | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/kubectx-kubens/README.md b/src/kubectx-kubens/README.md index 1fde5ccdd..c47cb792a 100644 --- a/src/kubectx-kubens/README.md +++ b/src/kubectx-kubens/README.md @@ -3,11 +3,11 @@ kubectx is a tool to switch between contexts (clusters) on kubectl faster. kubens is a tool to switch between Kubernetes namespaces (and configure them for kubectl) easily. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/kubectx-kubens:1": {} + "ghcr.io/devcontainers-extra/features/kubectx-kubens:1": {} } ``` @@ -18,3 +18,7 @@ kubectx is a tool to switch between contexts (clusters) on kubectl faster. kuben | version | Select the version you would like to install (will apply for for both kubectx and kubens.) | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/kubie/README.md b/src/kubie/README.md index 6566bb6e7..a9d4166a8 100644 --- a/src/kubie/README.md +++ b/src/kubie/README.md @@ -7,7 +7,7 @@ Kubie offers kubernetes context switching, namespace switching and prompt modifi ```json "features": { - "ghcr.io/devcontainers-contrib/features/kubie:1": {} + "ghcr.io/devcontainers-extra/features/kubie:1": {} } ``` @@ -21,4 +21,4 @@ Kubie offers kubernetes context switching, namespace switching and prompt modifi --- -_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers-contrib/features/blob/main/src/kubie/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/kyverno-cli/README.md b/src/kyverno-cli/README.md index af8b21cff..6659eb026 100644 --- a/src/kyverno-cli/README.md +++ b/src/kyverno-cli/README.md @@ -3,11 +3,11 @@ Kyverno is a policy engine designed for Kubernetes. The Kyverno Command Line Interface (CLI) is designed to validate and test policy behavior to resources prior to adding them to a cluster. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/kyverno-cli:1": {} + "ghcr.io/devcontainers-extra/features/kyverno-cli:1": {} } ``` @@ -18,3 +18,7 @@ Kyverno is a policy engine designed for Kubernetes. The Kyverno Command Line Int | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/lastpass-cli-homebrew/README.md b/src/lastpass-cli-homebrew/README.md index 2a384dee4..b2d911802 100644 --- a/src/lastpass-cli-homebrew/README.md +++ b/src/lastpass-cli-homebrew/README.md @@ -3,11 +3,11 @@ The LastPass command line application is an open source project that allows you to create, edit, and retrieve passwords in your online LastPass vault. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/lastpass-cli-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/lastpass-cli-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ The LastPass command line application is an open source project that allows you | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/layrry-sdkman/README.md b/src/layrry-sdkman/README.md index 2dfa82ca1..8739d0136 100644 --- a/src/layrry-sdkman/README.md +++ b/src/layrry-sdkman/README.md @@ -8,11 +8,11 @@ module layers, allowing multiple versions of one module to be used within an application at the same time, as well as dynamically adding and removing modules at application runtime. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/layrry-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/layrry-sdkman:2": {} } ``` @@ -25,3 +25,7 @@ at application runtime. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/lean-asdf/README.md b/src/lean-asdf/README.md index 7159f2dfa..e5b5ec777 100644 --- a/src/lean-asdf/README.md +++ b/src/lean-asdf/README.md @@ -3,11 +3,11 @@ Lean is a functional programming language that makes it easy to write correct and maintainable code. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/lean-asdf:2": {} + "ghcr.io/devcontainers-extra/features/lean-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Lean is a functional programming language that makes it easy to write correct an | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/lefthook-asdf/README.md b/src/lefthook-asdf/README.md index 48bdc7803..948caf082 100644 --- a/src/lefthook-asdf/README.md +++ b/src/lefthook-asdf/README.md @@ -3,11 +3,11 @@ Lefthook is the fastest polyglot Git hooks manager out there. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/lefthook-asdf:1": {} + "ghcr.io/devcontainers-extra/features/lefthook-asdf:1": {} } ``` @@ -18,3 +18,7 @@ Lefthook is the fastest polyglot Git hooks manager out there. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/leiningen-sdkman/README.md b/src/leiningen-sdkman/README.md index 5b13c6717..c330268e8 100644 --- a/src/leiningen-sdkman/README.md +++ b/src/leiningen-sdkman/README.md @@ -5,11 +5,11 @@ Leiningen is the easiest way to use Clojure. With a focus on project automation and declarative configuration, it gets out of your way and lets you focus on your code. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/leiningen-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/leiningen-sdkman:2": {} } ``` @@ -22,3 +22,7 @@ your code. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/lektor/README.md b/src/lektor/README.md index 8c53c46ef..8378c0f16 100644 --- a/src/lektor/README.md +++ b/src/lektor/README.md @@ -3,11 +3,11 @@ Lektor is a static website generator. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/lektor:2": {} + "ghcr.io/devcontainers-extra/features/lektor:2": {} } ``` @@ -18,3 +18,7 @@ Lektor is a static website generator. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/lerna-npm/README.md b/src/lerna-npm/README.md index ee33326b5..11ff5df32 100644 --- a/src/lerna-npm/README.md +++ b/src/lerna-npm/README.md @@ -3,11 +3,11 @@ Lerna is a fast modern build system for managing and publishing multiple JavaScript/TypeScript packages from the same repository. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/lerna-npm:1": {} + "ghcr.io/devcontainers-extra/features/lerna-npm:1": {} } ``` @@ -18,3 +18,7 @@ Lerna is a fast modern build system for managing and publishing multiple JavaScr | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/less/README.md b/src/less/README.md index 1c4e0f2a6..3500a6321 100644 --- a/src/less/README.md +++ b/src/less/README.md @@ -3,11 +3,11 @@ Less is a backwards-compatible language extension for CSS -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/less:2": {} + "ghcr.io/devcontainers-extra/features/less:2": {} } ``` @@ -18,3 +18,7 @@ Less is a backwards-compatible language extension for CSS | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/levant-asdf/README.md b/src/levant-asdf/README.md index 0e60803be..7f440784c 100644 --- a/src/levant-asdf/README.md +++ b/src/levant-asdf/README.md @@ -3,11 +3,11 @@ Levant is an open source templating and deployment tool for HashiCorp Nomad jobs -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/levant-asdf:2": {} + "ghcr.io/devcontainers-extra/features/levant-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Levant is an open source templating and deployment tool for HashiCorp Nomad jobs | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/lighthouse-cli/README.md b/src/lighthouse-cli/README.md index c6b204e9f..3a815c588 100644 --- a/src/lighthouse-cli/README.md +++ b/src/lighthouse-cli/README.md @@ -3,11 +3,11 @@ Lighthouse CLI provides the most flexibility in how Lighthouse runs can be configured and reported. Users who want more advanced usage, or want to run Lighthouse in an automated fashion should use the Node CLI. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/lighthouse-cli:1": {} + "ghcr.io/devcontainers-extra/features/lighthouse-cli:1": {} } ``` @@ -18,3 +18,7 @@ Lighthouse CLI provides the most flexibility in how Lighthouse runs can be confi | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/linkerd2-cli-edge/README.md b/src/linkerd2-cli-edge/README.md index cf98eb90f..2143e280c 100644 --- a/src/linkerd2-cli-edge/README.md +++ b/src/linkerd2-cli-edge/README.md @@ -3,11 +3,11 @@ The Linkerd CLI is the primary way to interact with Linkerd. It can install the control plane to your cluster, add the proxy to your service and provide detailed metrics for how your service is performing. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/linkerd2-cli-edge:1": {} + "ghcr.io/devcontainers-extra/features/linkerd2-cli-edge:1": {} } ``` @@ -18,3 +18,7 @@ The Linkerd CLI is the primary way to interact with Linkerd. It can install the | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/linkerd2-cli-stable/README.md b/src/linkerd2-cli-stable/README.md index 4f3f1cfa1..e2acc227f 100644 --- a/src/linkerd2-cli-stable/README.md +++ b/src/linkerd2-cli-stable/README.md @@ -3,11 +3,11 @@ The Linkerd CLI is the primary way to interact with Linkerd. It can install the control plane to your cluster, add the proxy to your service and provide detailed metrics for how your service is performing. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/linkerd2-cli-stable:1": {} + "ghcr.io/devcontainers-extra/features/linkerd2-cli-stable:1": {} } ``` @@ -18,3 +18,7 @@ The Linkerd CLI is the primary way to interact with Linkerd. It can install the | version | Select the version to install. 
| string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/linode-cli/README.md b/src/linode-cli/README.md index 956fee6a7..bb5daf8a6 100644 --- a/src/linode-cli/README.md +++ b/src/linode-cli/README.md @@ -3,11 +3,11 @@ Access the entire Linode platform from the command line, easily adding, removing, or modifing services. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/linode-cli:1": {} + "ghcr.io/devcontainers-extra/features/linode-cli:1": {} } ``` @@ -18,3 +18,7 @@ Access the entire Linode platform from the command line, easily adding, removing | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/lite-server/README.md b/src/lite-server/README.md index 1924e0f54..259c3d278 100644 --- a/src/lite-server/README.md +++ b/src/lite-server/README.md @@ -3,11 +3,11 @@ lite-server is a lightweight development only node server that serves a web app, opens it in the browser, refreshes when html or javascript change, injects CSS changes using sockets, and has a fallback page when a route is not found. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/lite-server:1": {} + "ghcr.io/devcontainers-extra/features/lite-server:1": {} } ``` @@ -18,3 +18,7 @@ lite-server is a lightweight development only node server that serves a web app, | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/live-server/README.md b/src/live-server/README.md index 1ae55c315..6be503458 100644 --- a/src/live-server/README.md +++ b/src/live-server/README.md @@ -3,11 +3,11 @@ Live Server is a little development server with live reload capability. Use it for hacking your HTML/JavaScript/CSS files, but not for deploying the final site. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/live-server:1": {} + "ghcr.io/devcontainers-extra/features/live-server:1": {} } ``` @@ -18,3 +18,7 @@ Live Server is a little development server with live reload capability. Use it f | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/localstack/README.md b/src/localstack/README.md index d495009e1..c2e324131 100644 --- a/src/localstack/README.md +++ b/src/localstack/README.md @@ -3,11 +3,11 @@ Localstack is a fully functional local AWS cloud stack. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/localstack:2": {} + "ghcr.io/devcontainers-extra/features/localstack:2": {} } ``` @@ -18,3 +18,7 @@ Localstack is a fully functional local AWS cloud stack. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/localtunnel-npm/README.md b/src/localtunnel-npm/README.md index 8c33ec716..14f348993 100644 --- a/src/localtunnel-npm/README.md +++ b/src/localtunnel-npm/README.md @@ -3,11 +3,11 @@ Localtunnel allows you to easily share a web service on your local development machine without messing with DNS and firewall settings. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/localtunnel-npm:1": {} + "ghcr.io/devcontainers-extra/features/localtunnel-npm:1": {} } ``` @@ -18,3 +18,7 @@ Localtunnel allows you to easily share a web service on your local development m | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/mackup/README.md b/src/mackup/README.md index 1be0ec051..d6377aec2 100644 --- a/src/mackup/README.md +++ b/src/mackup/README.md @@ -3,11 +3,11 @@ Mackup keeps your application settings in sync (OS X/Linux). -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mackup:1": {} + "ghcr.io/devcontainers-extra/features/mackup:1": {} } ``` @@ -18,3 +18,7 @@ Mackup keeps your application settings in sync (OS X/Linux). | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/markdownlint-cli/README.md b/src/markdownlint-cli/README.md index 49c5f19fe..494b8e9d3 100644 --- a/src/markdownlint-cli/README.md +++ b/src/markdownlint-cli/README.md @@ -3,11 +3,11 @@ Command Line interface for MarkdownLint -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/markdownlint-cli:1": {} + "ghcr.io/devcontainers-extra/features/markdownlint-cli:1": {} } ``` @@ -18,3 +18,7 @@ Command Line interface for MarkdownLint | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/markdownlint-cli2/README.md b/src/markdownlint-cli2/README.md index a9f5cb25a..41dc531a4 100644 --- a/src/markdownlint-cli2/README.md +++ b/src/markdownlint-cli2/README.md @@ -3,11 +3,11 @@ A fast, flexible, configuration-based command-line interface for linting Markdown/CommonMark files with the markdownlint library. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/markdownlint-cli2:1": {} + "ghcr.io/devcontainers-extra/features/markdownlint-cli2:1": {} } ``` @@ -18,3 +18,7 @@ A fast, flexible, configuration-based command-line interface for linting Markdow | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/maven-sdkman/README.md b/src/maven-sdkman/README.md index 70d790bc7..d18a6b1ca 100644 --- a/src/maven-sdkman/README.md +++ b/src/maven-sdkman/README.md @@ -5,11 +5,11 @@ Apache Maven is a software project management and comprehension tool. 
Based on the concept of a project object model (POM), Maven can manage a project's build, reporting and documentation from a central piece of information. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/maven-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/maven-sdkman:2": {} } ``` @@ -22,3 +22,7 @@ reporting and documentation from a central piece of information. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/meltano/README.md b/src/meltano/README.md index 8004af5d2..77db33664 100644 --- a/src/meltano/README.md +++ b/src/meltano/README.md @@ -3,11 +3,11 @@ Meltano lets you extract and load data with a software development-inspired approach that that delivers flexibility and limitless collaboration. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/meltano:2": {} + "ghcr.io/devcontainers-extra/features/meltano:2": {} } ``` @@ -18,3 +18,7 @@ Meltano lets you extract and load data with a software development-inspired appr | version | Select the version of Meltano ELT to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/memcached-exporter/README.md b/src/memcached-exporter/README.md index 890f6c0c4..00962f57a 100644 --- a/src/memcached-exporter/README.md +++ b/src/memcached-exporter/README.md @@ -3,11 +3,11 @@ The memcached exporter exports metrics from a memcached server for consumption by Prometheus. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/memcached-exporter:1": {} + "ghcr.io/devcontainers-extra/features/memcached-exporter:1": {} } ``` @@ -18,3 +18,7 @@ The memcached exporter exports metrics from a memcached server for consumption b | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/micro/README.md b/src/micro/README.md index 030709b73..8a410fc7a 100644 --- a/src/micro/README.md +++ b/src/micro/README.md @@ -3,13 +3,22 @@ micro is a terminal-based text editor that aims to be easy to use and intuitive, while also taking advantage of the capabilities of modern terminals. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/micro:1": {} + "ghcr.io/devcontainers-extra/features/micro:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/micronaut-sdkman/README.md b/src/micronaut-sdkman/README.md index 11a292b3d..5f561b92f 100644 --- a/src/micronaut-sdkman/README.md +++ b/src/micronaut-sdkman/README.md @@ -3,11 +3,11 @@ Micronaut is an open source microservice framework for the JVM -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/micronaut-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/micronaut-sdkman:2": {} } ``` @@ -20,3 +20,7 @@ Micronaut is an open source microservice framework for the JVM | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/mitmproxy/README.md b/src/mitmproxy/README.md index 4c532be3f..db265c9a8 100644 --- a/src/mitmproxy/README.md +++ b/src/mitmproxy/README.md @@ -3,11 +3,11 @@ mitmproxy is an interactive TLS-capable intercepting HTTP proxy for penetration testers and software developers -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mitmproxy:2": {} + "ghcr.io/devcontainers-extra/features/mitmproxy:2": {} } ``` @@ -18,3 +18,7 @@ mitmproxy is an interactive TLS-capable intercepting HTTP proxy for penetration | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/mkcert/README.md b/src/mkcert/README.md index 98ea737dc..86f8d93c1 100644 --- a/src/mkcert/README.md +++ b/src/mkcert/README.md @@ -3,11 +3,11 @@ mkcert is a simple tool for making locally-trusted development certificates. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mkcert:1": {} + "ghcr.io/devcontainers-extra/features/mkcert:1": {} } ``` @@ -18,3 +18,7 @@ mkcert is a simple tool for making locally-trusted development certificates. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/mkdocs/README.md b/src/mkdocs/README.md index 50a237317..9ee794113 100644 --- a/src/mkdocs/README.md +++ b/src/mkdocs/README.md @@ -3,11 +3,11 @@ MkDocs is a fast, simple and downright gorgeous static site generator that's geared towards building project documentation. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mkdocs:2": {} + "ghcr.io/devcontainers-extra/features/mkdocs:2": {} } ``` @@ -19,3 +19,7 @@ MkDocs is a fast, simple and downright gorgeous static site generator that's gea | plugins | A space delimitered list of mkdocs plugins (will be injected into the mkdocs pipx env). see proposals for example | string | mkdocs-material | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/mlocate-apt-get/README.md b/src/mlocate-apt-get/README.md index ecf3abf2b..e5165325d 100644 --- a/src/mlocate-apt-get/README.md +++ b/src/mlocate-apt-get/README.md @@ -3,13 +3,22 @@ mlocate is a locate/updatedb implementation used for quick lookup of file names. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mlocate-apt-get:1": {} + "ghcr.io/devcontainers-extra/features/mlocate-apt-get:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/mlton-asdf/README.md b/src/mlton-asdf/README.md index 168b07645..0781c8397 100644 --- a/src/mlton-asdf/README.md +++ b/src/mlton-asdf/README.md @@ -3,11 +3,11 @@ MLton is a whole-program optimizing compiler for the Standard ML programming language. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mlton-asdf:2": {} + "ghcr.io/devcontainers-extra/features/mlton-asdf:2": {} } ``` @@ -18,3 +18,7 @@ MLton is a whole-program optimizing compiler for the Standard ML programming lan | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/mocha/README.md b/src/mocha/README.md index 3fc1a144b..ab4fb9a3f 100644 --- a/src/mocha/README.md +++ b/src/mocha/README.md @@ -3,11 +3,11 @@ Mocha is a feature-rich JavaScript test framework running on Node.js and in the browser, making asynchronous testing simple and fun. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mocha:2": {} + "ghcr.io/devcontainers-extra/features/mocha:2": {} } ``` @@ -18,3 +18,7 @@ Mocha is a feature-rich JavaScript test framework running on Node.js and in the | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/mongodb-atlas-cli-homebrew/README.md b/src/mongodb-atlas-cli-homebrew/README.md index 0f77e0fb9..b3565d2b8 100644 --- a/src/mongodb-atlas-cli-homebrew/README.md +++ b/src/mongodb-atlas-cli-homebrew/README.md @@ -3,11 +3,11 @@ The Atlas CLI is a command line interface built specifically for MongoDB Atlas. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mongodb-atlas-cli-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/mongodb-atlas-cli-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ The Atlas CLI is a command line interface built specifically for MongoDB Atlas. | version | Select the version of MongoDB Atlas CLI to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/mongosh-homebrew/README.md b/src/mongosh-homebrew/README.md index 28e8628dd..6fc412c74 100644 --- a/src/mongosh-homebrew/README.md +++ b/src/mongosh-homebrew/README.md @@ -3,11 +3,11 @@ MongoDB Shell to connect, configure, query, and work with your MongoDB database. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mongosh-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/mongosh-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ MongoDB Shell to connect, configure, query, and work with your MongoDB database. | version | Select the version of MongoDB Shell to install. 
| string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/mosh-apt-get/README.md b/src/mosh-apt-get/README.md index 4e11097f8..77de40128 100644 --- a/src/mosh-apt-get/README.md +++ b/src/mosh-apt-get/README.md @@ -3,13 +3,22 @@ Mosh is a remote terminal application that allows roaming, supports intermittent connectivity, and provides intelligent local echo and line editing of user keystrokes. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mosh-apt-get:1": {} + "ghcr.io/devcontainers-extra/features/mosh-apt-get:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/mosh-homebrew/README.md b/src/mosh-homebrew/README.md index 9695b0699..486ffc6e0 100644 --- a/src/mosh-homebrew/README.md +++ b/src/mosh-homebrew/README.md @@ -3,11 +3,11 @@ Mosh is a remote terminal application that allows roaming, supports intermittent connectivity, and provides intelligent local echo and line editing of user keystrokes. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mosh-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/mosh-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ Mosh is a remote terminal application that allows roaming, supports intermittent | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/mulefd-sdkman/README.md b/src/mulefd-sdkman/README.md index 16f5c407a..c21bcb70d 100644 --- a/src/mulefd-sdkman/README.md +++ b/src/mulefd-sdkman/README.md @@ -4,11 +4,11 @@ Mule Flow Diagrams is an open source tool that lets you generate flow dependency graph and diagrams for your Mule configurations. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mulefd-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/mulefd-sdkman:2": {} } ``` @@ -21,3 +21,7 @@ graph and diagrams for your Mule configurations. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/mvnd-sdkman/README.md b/src/mvnd-sdkman/README.md index 6a93a4c3b..5b36fe4cd 100644 --- a/src/mvnd-sdkman/README.md +++ b/src/mvnd-sdkman/README.md @@ -5,11 +5,11 @@ The mvnd project aims to provide a daemon infrastructure for maven based builds. It borrows techniques from Gradle and Takari to provide a simple and efficient system. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mvnd-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/mvnd-sdkman:2": {} } ``` @@ -22,3 +22,7 @@ system. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/mybatis-sdkman/README.md b/src/mybatis-sdkman/README.md index 16790dbc6..8f4a60613 100644 --- a/src/mybatis-sdkman/README.md +++ b/src/mybatis-sdkman/README.md @@ -6,11 +6,11 @@ provide database migrations for any database (new or existing) and make the current status of the database easily accessible and comprehensible. Installing this candidate provides the migrate command for managing database migrations -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mybatis-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/mybatis-sdkman:2": {} } ``` @@ -23,3 +23,7 @@ this candidate provides the migrate command for managing database migrations | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/mypy/README.md b/src/mypy/README.md index 234c608ae..15bc5b198 100644 --- a/src/mypy/README.md +++ b/src/mypy/README.md @@ -3,11 +3,11 @@ Mypy is a static type checker for Python. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mypy:2": {} + "ghcr.io/devcontainers-extra/features/mypy:2": {} } ``` @@ -18,3 +18,7 @@ Mypy is a static type checker for Python. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/mysql-homebrew/README.md b/src/mysql-homebrew/README.md index 379f48023..2a6e553f8 100644 --- a/src/mysql-homebrew/README.md +++ b/src/mysql-homebrew/README.md @@ -3,11 +3,11 @@ MySQL is an open-source relational database management system (RDBMS) -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mysql-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/mysql-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ MySQL is an open-source relational database management system (RDBMS) | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/mysqld-exporter/README.md b/src/mysqld-exporter/README.md index 53caa00cc..b01bd3c57 100644 --- a/src/mysqld-exporter/README.md +++ b/src/mysqld-exporter/README.md @@ -3,11 +3,11 @@ Prometheus exporter for MySQL server metrics. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mysqld-exporter:1": {} + "ghcr.io/devcontainers-extra/features/mysqld-exporter:1": {} } ``` @@ -18,3 +18,7 @@ Prometheus exporter for MySQL server metrics. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/n8n/README.md b/src/n8n/README.md index ccd40a75b..2ab5d46e5 100644 --- a/src/n8n/README.md +++ b/src/n8n/README.md @@ -3,11 +3,11 @@ n8n is a free and source-available fair-code licensed workflow automation tool. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/n8n:1": {} + "ghcr.io/devcontainers-extra/features/n8n:1": {} } ``` @@ -18,3 +18,7 @@ n8n is a free and source-available fair-code licensed workflow automation tool. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/nancy/README.md b/src/nancy/README.md index f33a192ba..85da948b4 100644 --- a/src/nancy/README.md +++ b/src/nancy/README.md @@ -3,11 +3,11 @@ Nancy is a tool to check for vulnerabilities in your Golang dependencies, powered by Sonatype OSS Index. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/nancy:1": {} + "ghcr.io/devcontainers-extra/features/nancy:1": {} } ``` @@ -18,3 +18,7 @@ Nancy is a tool to check for vulnerabilities in your Golang dependencies, powere | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/navi/README.md b/src/navi/README.md index 248aa09e2..0a88764d0 100644 --- a/src/navi/README.md +++ b/src/navi/README.md @@ -3,11 +3,11 @@ navi is an interactive cheatsheet tool for the command-line. navi allows you to browse through cheatsheets (that you may write yourself or download from maintainers) and execute commands. Suggested values for arguments are dynamically displayed in a list. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/navi:1": {} + "ghcr.io/devcontainers-extra/features/navi:1": {} } ``` @@ -18,3 +18,7 @@ navi is an interactive cheatsheet tool for the command-line. navi allows you to | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ncdu/README.md b/src/ncdu/README.md index 2eeef602c..5dac6b013 100644 --- a/src/ncdu/README.md +++ b/src/ncdu/README.md @@ -3,13 +3,22 @@ ncdu (NCurses Disk Usage) is a disk utility for Unix systems. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ncdu:1": {} + "ghcr.io/devcontainers-extra/features/ncdu:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/neko-asdf/README.md b/src/neko-asdf/README.md index 84838b763..a5602efb7 100644 --- a/src/neko-asdf/README.md +++ b/src/neko-asdf/README.md @@ -3,11 +3,11 @@ Installs Neko Virtual Machine -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/neko-asdf:2": {} + "ghcr.io/devcontainers-extra/features/neko-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Installs Neko Virtual Machine | version | Select the version of Neko Virtual Machine to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/neo4jmigrations-sdkman/README.md b/src/neo4jmigrations-sdkman/README.md index e33885918..bacda5710 100644 --- a/src/neo4jmigrations-sdkman/README.md +++ b/src/neo4jmigrations-sdkman/README.md @@ -5,11 +5,11 @@ Neo4j-Migrations is a database migration and refactoring tool that allows running Cypher scripts and programmatic refactorings in a controlled and repeatable fashion against one or more Neo4j database. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/neo4jmigrations-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/neo4jmigrations-sdkman:2": {} } ``` @@ -22,3 +22,7 @@ repeatable fashion against one or more Neo4j database. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/neofetch/README.md b/src/neofetch/README.md index 6a5c79624..b9cfb66a4 100644 --- a/src/neofetch/README.md +++ b/src/neofetch/README.md @@ -3,13 +3,22 @@ Neofetch displays information about your operating system, software and hardware in an aesthetic and visually pleasing way. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/neofetch:1": {} + "ghcr.io/devcontainers-extra/features/neofetch:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/neovim-apt-get/README.md b/src/neovim-apt-get/README.md index b45053a9c..45914583e 100644 --- a/src/neovim-apt-get/README.md +++ b/src/neovim-apt-get/README.md @@ -3,13 +3,22 @@ Neovim is a fork of Vim focused on modern code and features, rather than running in legacy environments. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/neovim-apt-get:1": {} + "ghcr.io/devcontainers-extra/features/neovim-apt-get:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/neovim-homebrew/README.md b/src/neovim-homebrew/README.md index c8057f766..1eb1b6a32 100644 --- a/src/neovim-homebrew/README.md +++ b/src/neovim-homebrew/README.md @@ -3,11 +3,11 @@ Neovim is a fork of Vim focused on modern code and features, rather than running in legacy environments. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/neovim-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/neovim-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ Neovim is a fork of Vim focused on modern code and features, rather than running | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/nestjs-cli/README.md b/src/nestjs-cli/README.md index b4f74ffe3..3f17d3b17 100644 --- a/src/nestjs-cli/README.md +++ b/src/nestjs-cli/README.md @@ -3,11 +3,11 @@ Nestjs is a progressive Node.js framework for building efficient, reliable and scalable server-side applications. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/nestjs-cli:2": {} + "ghcr.io/devcontainers-extra/features/nestjs-cli:2": {} } ``` @@ -18,3 +18,7 @@ Nestjs is a progressive Node.js framework for building efficient, reliable and s | version | Select the version of NestJS CLI to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/netdata/README.md b/src/netdata/README.md index 1d832ae6d..39b8cf18c 100644 --- a/src/netdata/README.md +++ b/src/netdata/README.md @@ -3,13 +3,22 @@ Netdata is a distributed, real-time, performance and health monitoring platform for systems, hardware, containers and applications, collecting thousands of useful metrics with zero configuration needed. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/netdata:1": {} + "ghcr.io/devcontainers-extra/features/netdata:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/netlify-cli/README.md b/src/netlify-cli/README.md index f9790ab3f..720ecfa3f 100644 --- a/src/netlify-cli/README.md +++ b/src/netlify-cli/README.md @@ -3,11 +3,11 @@ Netlify CLI lets you configure continuous deployment straight from the command line. You can use Netlify CLI to run a local development server that you can share with others, run a local build and plugins, and deploy your site. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/netlify-cli:1": {} + "ghcr.io/devcontainers-extra/features/netlify-cli:1": {} } ``` @@ -18,3 +18,7 @@ Netlify CLI lets you configure continuous deployment straight from the command l | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/nim-asdf/README.md b/src/nim-asdf/README.md index 4d8e03e93..0c826b575 100644 --- a/src/nim-asdf/README.md +++ b/src/nim-asdf/README.md @@ -3,11 +3,11 @@ Nim is a statically typed compiled systems programming language. It combines successful concepts from mature languages like Python, Ada and Modula. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/nim-asdf:2": {} + "ghcr.io/devcontainers-extra/features/nim-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Nim is a statically typed compiled systems programming language. It combines suc | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/ninja-asdf/README.md b/src/ninja-asdf/README.md index 66c5982e7..ca4e9050f 100644 --- a/src/ninja-asdf/README.md +++ b/src/ninja-asdf/README.md @@ -3,11 +3,11 @@ Ninja is a small build system with a focus on speed. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ninja-asdf:2": {} + "ghcr.io/devcontainers-extra/features/ninja-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Ninja is a small build system with a focus on speed. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/nmap-apt-get/README.md b/src/nmap-apt-get/README.md index f70d1ecf7..ebf57c535 100644 --- a/src/nmap-apt-get/README.md +++ b/src/nmap-apt-get/README.md @@ -3,13 +3,22 @@ Nmap (Network Mapper) is a free and open source utility for network discovery and security auditing. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/nmap-apt-get:1": {} + "ghcr.io/devcontainers-extra/features/nmap-apt-get:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/nmap-homebrew/README.md b/src/nmap-homebrew/README.md index 052c160e2..94cd389a3 100644 --- a/src/nmap-homebrew/README.md +++ b/src/nmap-homebrew/README.md @@ -3,11 +3,11 @@ Nmap (Network Mapper) is a free and open source utility for network discovery and security auditing. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/nmap-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/nmap-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ Nmap (Network Mapper) is a free and open source utility for network discovery an | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/nnn-apt-get/README.md b/src/nnn-apt-get/README.md index 93213eb21..7f35dfc91 100644 --- a/src/nnn-apt-get/README.md +++ b/src/nnn-apt-get/README.md @@ -3,13 +3,22 @@ nnn is a free and open-source file manager which provides a text-based user interface to provide file managing functionalities for Unix-like systems. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/nnn-apt-get:1": {} + "ghcr.io/devcontainers-extra/features/nnn-apt-get:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/nnn-homebrew/README.md b/src/nnn-homebrew/README.md index a097ce3d9..7ad706ada 100644 --- a/src/nnn-homebrew/README.md +++ b/src/nnn-homebrew/README.md @@ -3,11 +3,11 @@ nnn is a free and open-source file manager which provides a text-based user interface to provide file managing functionalities for Unix-like systems. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/nnn-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/nnn-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ nnn is a free and open-source file manager which provides a text-based user inte | version | Select the version of nnn (n³) to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/node-asdf/README.md b/src/node-asdf/README.md index d7feb60ed..c84e4185f 100644 --- a/src/node-asdf/README.md +++ b/src/node-asdf/README.md @@ -3,11 +3,11 @@ Installs Node.js via asdf. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/node-asdf:0": {} + "ghcr.io/devcontainers-extra/features/node-asdf:0": {} } ``` @@ -18,3 +18,7 @@ Installs Node.js via asdf. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/node-exporter/README.md b/src/node-exporter/README.md index de5ccfa2f..3a8dfa5b7 100644 --- a/src/node-exporter/README.md +++ b/src/node-exporter/README.md @@ -3,11 +3,11 @@ Prometheus exporter for hardware and OS metrics exposed by *NIX kernels, written in Go with pluggable metric collectors. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/node-exporter:1": {} + "ghcr.io/devcontainers-extra/features/node-exporter:1": {} } ``` @@ -18,3 +18,7 @@ Prometheus exporter for hardware and OS metrics exposed by *NIX kernels, written | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/nomad-asdf/README.md b/src/nomad-asdf/README.md index 0c490a5ed..72178cbd0 100644 --- a/src/nomad-asdf/README.md +++ b/src/nomad-asdf/README.md @@ -3,11 +3,11 @@ Nomad is an easy-to-use, flexible, and performant workload orchestrator that can deploy a mix of microservice, batch, containerized, and non-containerized applications. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/nomad-asdf:2": {} + "ghcr.io/devcontainers-extra/features/nomad-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Nomad is an easy-to-use, flexible, and performant workload orchestrator that can | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/nox/README.md b/src/nox/README.md index 6d7fafc6b..54d16b9f9 100644 --- a/src/nox/README.md +++ b/src/nox/README.md @@ -3,11 +3,11 @@ nox is a command-line tool that automates testing in multiple Python environments. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/nox:2": {} + "ghcr.io/devcontainers-extra/features/nox:2": {} } ``` @@ -18,3 +18,7 @@ nox is a command-line tool that automates testing in multiple Python environment | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/npm-package/README.md b/src/npm-package/README.md index 96c453625..8bf3850ae 100644 --- a/src/npm-package/README.md +++ b/src/npm-package/README.md @@ -1,13 +1,13 @@ -# npm package (npm-package) +# NPM package (npm-package) -Installs an npm package. +Installs an npm package globally. ## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/npm-package:1": {} + "ghcr.io/devcontainers-extra/features/npm-package:1": {} } ``` @@ -22,4 +22,4 @@ Installs an npm package. --- -_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers-contrib/features/blob/main/src/npm-package/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/nx-npm/README.md b/src/nx-npm/README.md index 5152c54f5..9c7839176 100644 --- a/src/nx-npm/README.md +++ b/src/nx-npm/README.md @@ -3,11 +3,11 @@ Nx is a smart, fast and extensible build system with first class monorepo support and powerful integrations. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/nx-npm:1": {} + "ghcr.io/devcontainers-extra/features/nx-npm:1": {} } ``` @@ -18,3 +18,7 @@ Nx is a smart, fast and extensible build system with first class monorepo suppor | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ocaml-asdf/README.md b/src/ocaml-asdf/README.md index 115dbb136..1733f4faf 100644 --- a/src/ocaml-asdf/README.md +++ b/src/ocaml-asdf/README.md @@ -3,11 +3,11 @@ OCaml is a general-purpose, industrial-strength programming language with an emphasis on expressiveness and safety. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ocaml-asdf:2": {} + "ghcr.io/devcontainers-extra/features/ocaml-asdf:2": {} } ``` @@ -18,3 +18,7 @@ OCaml is a general-purpose, industrial-strength programming language with an emp | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/oclif/README.md b/src/oclif/README.md index 078d30690..c0d3a88ec 100644 --- a/src/oclif/README.md +++ b/src/oclif/README.md @@ -3,11 +3,11 @@ The oclif generator creates a CLI project in TypeScript to get you started quickly. It requires very few runtime dependencies and has extremely minimal overhead. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/oclif:1": {} + "ghcr.io/devcontainers-extra/features/oclif:1": {} } ``` @@ -18,3 +18,7 @@ The oclif generator creates a CLI project in TypeScript to get you started quick | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/opa/README.md b/src/opa/README.md index 893f34a0d..c5a2f4a2f 100644 --- a/src/opa/README.md +++ b/src/opa/README.md @@ -3,11 +3,11 @@ Open Policy Agent (OPA) is an open source, general-purpose policy engine that enables unified, context-aware policy enforcement across the entire stack. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/opa:1": {} + "ghcr.io/devcontainers-extra/features/opa:1": {} } ``` @@ -18,3 +18,7 @@ Open Policy Agent (OPA) is an open source, general-purpose policy engine that en | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/opam-asdf/README.md b/src/opam-asdf/README.md index 62369115d..73031b51e 100644 --- a/src/opam-asdf/README.md +++ b/src/opam-asdf/README.md @@ -3,11 +3,11 @@ opam is a source-based package manager for OCaml. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/opam-asdf:2": {} + "ghcr.io/devcontainers-extra/features/opam-asdf:2": {} } ``` @@ -18,3 +18,7 @@ opam is a source-based package manager for OCaml. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ory-cli/README.md b/src/ory-cli/README.md index bb567bb2e..63208579f 100644 --- a/src/ory-cli/README.md +++ b/src/ory-cli/README.md @@ -3,11 +3,11 @@ Ory CLI is a convenient and easy-to-use tool that helps you manage and configure The Ory Network. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ory-cli:1": {} + "ghcr.io/devcontainers-extra/features/ory-cli:1": {} } ``` @@ -18,3 +18,7 @@ Ory CLI is a convenient and easy-to-use tool that helps you manage and configure | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ory-hydra/README.md b/src/ory-hydra/README.md index ba64fb2d2..94bb7bc90 100644 --- a/src/ory-hydra/README.md +++ b/src/ory-hydra/README.md @@ -3,11 +3,11 @@ Ory Hydra is a hardened, OpenID Certified OAuth 2.0 Server and OpenID Connect Provider optimized for low-latency, high throughput, and low resource consumption. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ory-hydra:1": {} + "ghcr.io/devcontainers-extra/features/ory-hydra:1": {} } ``` @@ -18,3 +18,7 @@ Ory Hydra is a hardened, OpenID Certified OAuth 2.0 Server and OpenID Connect Pr | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ory-kratos/README.md b/src/ory-kratos/README.md index 8efe52864..c03f3329a 100644 --- a/src/ory-kratos/README.md +++ b/src/ory-kratos/README.md @@ -3,11 +3,11 @@ Ory Kratos is the developer-friendly, security-hardened and battle-test Identity, User Management and Authentication system for the Cloud. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ory-kratos:1": {} + "ghcr.io/devcontainers-extra/features/ory-kratos:1": {} } ``` @@ -18,3 +18,7 @@ Ory Kratos is the developer-friendly, security-hardened and battle-test Identity | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ory-oathkeeper/README.md b/src/ory-oathkeeper/README.md index 2a8ecc8c2..50b9b0f23 100644 --- a/src/ory-oathkeeper/README.md +++ b/src/ory-oathkeeper/README.md @@ -3,11 +3,11 @@ ORY Oathkeeper is an Identity & Access Proxy (IAP) and Access Control Decision API that authorizes HTTP requests based on sets of Access Rules. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ory-oathkeeper:1": {} + "ghcr.io/devcontainers-extra/features/ory-oathkeeper:1": {} } ``` @@ -18,3 +18,7 @@ ORY Oathkeeper is an Identity & Access Proxy (IAP) and Access Control Decision A | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/packer-asdf/README.md b/src/packer-asdf/README.md index 1bda53195..63c803400 100644 --- a/src/packer-asdf/README.md +++ b/src/packer-asdf/README.md @@ -3,11 +3,11 @@ Packer is a free and open source tool for creating golden images for multiple platforms from a single source configuration. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/packer-asdf:2": {} + "ghcr.io/devcontainers-extra/features/packer-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Packer is a free and open source tool for creating golden images for multiple pl | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/pandoc/README.md b/src/pandoc/README.md index d8192f539..f6339d371 100644 --- a/src/pandoc/README.md +++ b/src/pandoc/README.md @@ -3,11 +3,11 @@ Pandoc is a Haskell library for converting from one markup format to another, and a command-line tool that uses this library. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/pandoc:1": {} + "ghcr.io/devcontainers-extra/features/pandoc:1": {} } ``` @@ -18,3 +18,7 @@ Pandoc is a Haskell library for converting from one markup format to another, an | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/pass-apt-get/README.md b/src/pass-apt-get/README.md index d22ee4e9b..ec5b120f2 100644 --- a/src/pass-apt-get/README.md +++ b/src/pass-apt-get/README.md @@ -3,13 +3,22 @@ pass is a very simple password store that encrypts passwords using gpg and places the encrypted password in a directory. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/pass-apt-get:1": {} + "ghcr.io/devcontainers-extra/features/pass-apt-get:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/pdm/README.md b/src/pdm/README.md index 982e0251e..74cd997e6 100644 --- a/src/pdm/README.md +++ b/src/pdm/README.md @@ -3,11 +3,11 @@ PDM is a modern Python package and dependency manager supporting the latest PEP standards. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/pdm:2": {} + "ghcr.io/devcontainers-extra/features/pdm:2": {} } ``` @@ -18,3 +18,7 @@ PDM is a modern Python package and dependency manager supporting the latest PEP | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/peco-asdf/README.md b/src/peco-asdf/README.md index 6c0929787..f77b034cd 100644 --- a/src/peco-asdf/README.md +++ b/src/peco-asdf/README.md @@ -3,11 +3,11 @@ peco is simplistic interactive filtering tool. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/peco-asdf:2": {} + "ghcr.io/devcontainers-extra/features/peco-asdf:2": {} } ``` @@ -18,3 +18,7 @@ peco is simplistic interactive filtering tool. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/perl-asdf/README.md b/src/perl-asdf/README.md index 540838fc2..6912e05ca 100644 --- a/src/perl-asdf/README.md +++ b/src/perl-asdf/README.md @@ -3,11 +3,11 @@ Perl is a general-purpose, interpreted, dynamic programming language. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/perl-asdf:2": {} + "ghcr.io/devcontainers-extra/features/perl-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Perl is a general-purpose, interpreted, dynamic programming language. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/pierrot-sdkman/README.md b/src/pierrot-sdkman/README.md index 9b18c9b04..5a432cdb7 100644 --- a/src/pierrot-sdkman/README.md +++ b/src/pierrot-sdkman/README.md @@ -3,11 +3,11 @@ Pierrot helps you to manage many GitHub repositories with a single command. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/pierrot-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/pierrot-sdkman:2": {} } ``` @@ -20,3 +20,7 @@ Pierrot helps you to manage many GitHub repositories with a single command. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/pipenv/README.md b/src/pipenv/README.md index b9eca9223..e975184fc 100644 --- a/src/pipenv/README.md +++ b/src/pipenv/README.md @@ -3,11 +3,11 @@ Pipenv automatically creates and manages a virtualenv for your projects. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/pipenv:2": {} + "ghcr.io/devcontainers-extra/features/pipenv:2": {} } ``` @@ -18,3 +18,7 @@ Pipenv automatically creates and manages a virtualenv for your projects. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/pipx-package/README.md b/src/pipx-package/README.md index 2f1784ec5..043f5ab1d 100644 --- a/src/pipx-package/README.md +++ b/src/pipx-package/README.md @@ -18,9 +18,11 @@ Installs a pipx package. | package | Select the pipx package to install. | string | - | | version | Select the version of the pipx package to install. | string | latest | | injections | Space delimitered list of python packages to inject into the main package env | string | - | +| includeDeps | Include apps of dependent packages | boolean | false | +| interpreter | Force the python interpreter to be used (must already exist on the container). If none is selected (the default), python3 will be used (and installed if it does not exist) | string | - | --- -_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers-contrib/features/blob/main/src/pipx-package/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/pnpm/README.md b/src/pnpm/README.md index 67345c6f7..91e5967c0 100644 --- a/src/pnpm/README.md +++ b/src/pnpm/README.md @@ -3,11 +3,11 @@ Pnpm is a fast and disk space efficient package manager. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/pnpm:2": {} + "ghcr.io/devcontainers-extra/features/pnpm:2": {} } ``` @@ -18,3 +18,7 @@ Pnpm is a fast and disk space efficient package manager. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/podman-homebrew/README.md b/src/podman-homebrew/README.md index e31a61657..ade7d0811 100644 --- a/src/podman-homebrew/README.md +++ b/src/podman-homebrew/README.md @@ -3,11 +3,11 @@ Podman is a tool for managing containers and images, volumes mounted into those containers, and pods made from groups of containers. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/podman-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/podman-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ Podman is a tool for managing containers and images, volumes mounted into those | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/poetry/README.md b/src/poetry/README.md index 3f32583b7..75df7669b 100644 --- a/src/poetry/README.md +++ b/src/poetry/README.md @@ -3,11 +3,11 @@ Poetry is a tool for dependency management and packaging in Python. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/poetry:2": {} + "ghcr.io/devcontainers-extra/features/poetry:2": {} } ``` @@ -18,3 +18,7 @@ Poetry is a tool for dependency management and packaging in Python. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/pomchecker-sdkman/README.md b/src/pomchecker-sdkman/README.md index d49a9a8d4..90e1e3c06 100644 --- a/src/pomchecker-sdkman/README.md +++ b/src/pomchecker-sdkman/README.md @@ -4,11 +4,11 @@ Pomchecker - Checks that POM files comply with the minimum rules required for publication to Maven Central. It can also check if a POM is a valid BOM file. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/pomchecker-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/pomchecker-sdkman:2": {} } ``` @@ -21,3 +21,7 @@ publication to Maven Central. It can also check if a POM is a valid BOM file. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/poppler-utils-apt-get/README.md b/src/poppler-utils-apt-get/README.md index 18ebbfcd3..e27bf1e9a 100644 --- a/src/poppler-utils-apt-get/README.md +++ b/src/poppler-utils-apt-get/README.md @@ -15,13 +15,22 @@ pdftops - convert PDF to printable PS format pdftotext - extract all text from PDF pdfunite - merges several PDFs. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/poppler-utils-apt-get:1": {} + "ghcr.io/devcontainers-extra/features/poppler-utils-apt-get:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/powerbi-visuals-tools/README.md b/src/powerbi-visuals-tools/README.md index 515cbb6e1..706a74fa6 100644 --- a/src/powerbi-visuals-tools/README.md +++ b/src/powerbi-visuals-tools/README.md @@ -3,11 +3,11 @@ PowerBI Visual Tools Contains tools for building/packaging Power BI visuals -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/powerbi-visuals-tools:2": {} + "ghcr.io/devcontainers-extra/features/powerbi-visuals-tools:2": {} } ``` @@ -18,3 +18,7 @@ PowerBI Visual Tools Contains tools for building/packaging Power BI visuals | version | Select the version of PowerBI Visual Tools (pbiviz) to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/powershell/README.md b/src/powershell/README.md index 6c4ba1ea4..a1a35568b 100644 --- a/src/powershell/README.md +++ b/src/powershell/README.md @@ -3,11 +3,11 @@ PowerShell is a cross-platform (Windows, Linux, and macOS) automation and configuration tool/framework that works well with your existing tools and is optimized for dealing with structured data (e.g. JSON, CSV, XML, etc.), REST APIs, and object models. It includes a command-line shell, an associated scripting language and a framework for processing cmdlets. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/powershell:1": {} + "ghcr.io/devcontainers-extra/features/powershell:1": {} } ``` @@ -18,3 +18,7 @@ PowerShell is a cross-platform (Windows, Linux, and macOS) automation and config | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/pre-commit/README.md b/src/pre-commit/README.md index aa1cefcd2..117e408e3 100644 --- a/src/pre-commit/README.md +++ b/src/pre-commit/README.md @@ -3,11 +3,11 @@ Pre-Commit is a framework for managing and maintaining multi-language pre-commit hooks. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/pre-commit:2": {} + "ghcr.io/devcontainers-extra/features/pre-commit:2": {} } ``` @@ -18,3 +18,7 @@ Pre-Commit is a framework for managing and maintaining multi-language pre-commit | version | Select the version of Pre-Commit to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/prettier/README.md b/src/prettier/README.md index ef31093f9..a0f458547 100644 --- a/src/prettier/README.md +++ b/src/prettier/README.md @@ -3,11 +3,11 @@ Prettier is an opinionated code formatter. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/prettier:1": {} + "ghcr.io/devcontainers-extra/features/prettier:1": {} } ``` @@ -18,3 +18,7 @@ Prettier is an opinionated code formatter. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/prisma/README.md b/src/prisma/README.md index 34f335111..2dc598bcb 100644 --- a/src/prisma/README.md +++ b/src/prisma/README.md @@ -3,11 +3,11 @@ Prisma is a next-generation ORM for Node.js & TypeScript | PostgreSQL, MySQL, MariaDB, SQL Server, SQLite, MongoDB and CockroachDB. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/prisma:2": {} + "ghcr.io/devcontainers-extra/features/prisma:2": {} } ``` @@ -18,3 +18,7 @@ Prisma is a next-generation ORM for Node.js & TypeScript | PostgreSQL, MySQL, Ma | version | Select the version of Prisma CLI to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/projen/README.md b/src/projen/README.md index 046fe74c3..524437838 100644 --- a/src/projen/README.md +++ b/src/projen/README.md @@ -3,11 +3,11 @@ projen synthesizes project configuration files such as package.json, tsconfig.json, .gitignore, GitHub Workflows, eslint, jest, etc from a well-typed definition written in JavaScript. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/projen:1": {} + "ghcr.io/devcontainers-extra/features/projen:1": {} } ``` @@ -18,3 +18,7 @@ projen synthesizes project configuration files such as package.json, tsconfig.js | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/prometheus/README.md b/src/prometheus/README.md index a6cca82f2..d6ea0d9a0 100644 --- a/src/prometheus/README.md +++ b/src/prometheus/README.md @@ -3,11 +3,11 @@ Prometheus, a Cloud Native Computing Foundation project, is a systems and service monitoring system. It collects metrics from configured targets at given intervals, evaluates rule expressions, displays the results, and can trigger alerts when specified conditions are observed. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/prometheus:1": {} + "ghcr.io/devcontainers-extra/features/prometheus:1": {} } ``` @@ -18,3 +18,7 @@ Prometheus, a Cloud Native Computing Foundation project, is a systems and servic | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/promlens/README.md b/src/promlens/README.md index fa458c5e8..4ca49baee 100644 --- a/src/promlens/README.md +++ b/src/promlens/README.md @@ -3,11 +3,11 @@ PromLens is a web-based PromQL query builder, analyzer, and visualizer. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/promlens:1": {} + "ghcr.io/devcontainers-extra/features/promlens:1": {} } ``` @@ -18,3 +18,7 @@ PromLens is a web-based PromQL query builder, analyzer, and visualizer. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/protoc-asdf/README.md b/src/protoc-asdf/README.md index 73622d450..2fd3dd599 100644 --- a/src/protoc-asdf/README.md +++ b/src/protoc-asdf/README.md @@ -3,11 +3,11 @@ protoc is the protocol buffer compiler -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/protoc-asdf:1": {} + "ghcr.io/devcontainers-extra/features/protoc-asdf:1": {} } ``` @@ -18,3 +18,7 @@ protoc is the protocol buffer compiler | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/protoc/README.md b/src/protoc/README.md index 3a3142556..469b29f47 100644 --- a/src/protoc/README.md +++ b/src/protoc/README.md @@ -3,11 +3,11 @@ A compiler for protocol buffer definition files. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/protoc:1": {} + "ghcr.io/devcontainers-extra/features/protoc:1": {} } ``` @@ -18,3 +18,7 @@ A compiler for protocol buffer definition files. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/pulumi/README.md b/src/pulumi/README.md index 336bc35b3..46d91dafd 100644 --- a/src/pulumi/README.md +++ b/src/pulumi/README.md @@ -7,7 +7,7 @@ Pulumi is a modern infrastructure as code platform ```json "features": { - "ghcr.io/devcontainers-contrib/features/pulumi:1": {} + "ghcr.io/devcontainers-extra/features/pulumi:1": {} } ``` @@ -22,4 +22,4 @@ Pulumi is a modern infrastructure as code platform --- -_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers-contrib/features/blob/main/src/pulumi/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/pushgateway/README.md b/src/pushgateway/README.md index 57ee4ecbe..cfeb22534 100644 --- a/src/pushgateway/README.md +++ b/src/pushgateway/README.md @@ -3,11 +3,11 @@ The Prometheus Pushgateway exists to allow ephemeral and batch jobs to expose their metrics to Prometheus. Since these kinds of jobs may not exist long enough to be scraped, they can instead push their metrics to a Pushgateway. The Pushgateway then exposes these metrics to Prometheus. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/pushgateway:1": {} + "ghcr.io/devcontainers-extra/features/pushgateway:1": {} } ``` @@ -18,3 +18,7 @@ The Prometheus Pushgateway exists to allow ephemeral and batch jobs to expose th | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/pyinfra/README.md b/src/pyinfra/README.md index 1f0cb6d94..582736fb1 100644 --- a/src/pyinfra/README.md +++ b/src/pyinfra/README.md @@ -3,11 +3,11 @@ pyinfra is a Python alternative to Ansible where you don't write your deployment in YAML file, but in Python. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/pyinfra:2": {} + "ghcr.io/devcontainers-extra/features/pyinfra:2": {} } ``` @@ -18,3 +18,7 @@ pyinfra is a Python alternative to Ansible where you don't write your deployment | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/pylint/README.md b/src/pylint/README.md index c064f91fd..568739170 100644 --- a/src/pylint/README.md +++ b/src/pylint/README.md @@ -3,11 +3,11 @@ Pylint is a static code analyser for Python 2 or 3. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/pylint:2": {} + "ghcr.io/devcontainers-extra/features/pylint:2": {} } ``` @@ -18,3 +18,7 @@ Pylint is a static code analyser for Python 2 or 3. | version | Select the version to install. 
| string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/pyoxidizer/README.md b/src/pyoxidizer/README.md index c1333ef6d..433cae6b6 100644 --- a/src/pyoxidizer/README.md +++ b/src/pyoxidizer/README.md @@ -3,11 +3,11 @@ PyOxidizer is a modern Python application packaging and distribution tool. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/pyoxidizer:1": {} + "ghcr.io/devcontainers-extra/features/pyoxidizer:1": {} } ``` @@ -18,3 +18,7 @@ PyOxidizer is a modern Python application packaging and distribution tool. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/pyscaffold/README.md b/src/pyscaffold/README.md index b9414da4c..0ade376eb 100644 --- a/src/pyscaffold/README.md +++ b/src/pyscaffold/README.md @@ -3,11 +3,11 @@ PyScaffold is a python project template generator with batteries included. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/pyscaffold:2": {} + "ghcr.io/devcontainers-extra/features/pyscaffold:2": {} } ``` @@ -18,3 +18,7 @@ PyScaffold is a python project template generator with batteries included. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/qrcode/README.md b/src/qrcode/README.md index 631064bf7..8779344cd 100644 --- a/src/qrcode/README.md +++ b/src/qrcode/README.md @@ -3,11 +3,11 @@ qrcode is a command line QR-Code generator. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/qrcode:2": {} + "ghcr.io/devcontainers-extra/features/qrcode:2": {} } ``` @@ -18,3 +18,7 @@ qrcode is a command line QR-Code generator. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/quarkus-sdkman/README.md b/src/quarkus-sdkman/README.md index e4fb9ece8..4c4f6f3ff 100644 --- a/src/quarkus-sdkman/README.md +++ b/src/quarkus-sdkman/README.md @@ -4,11 +4,11 @@ Quarkus is a Kubernetes Native Java framework tailored for OpenJDK HotSpot and GraalVM, crafted from best-of-breed Java libraries and standards. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/quarkus-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/quarkus-sdkman:2": {} } ``` @@ -21,3 +21,7 @@ GraalVM, crafted from best-of-breed Java libraries and standards. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/quasar-cli/README.md b/src/quasar-cli/README.md index 78c138693..d80dec8f4 100644 --- a/src/quasar-cli/README.md +++ b/src/quasar-cli/README.md @@ -3,11 +3,11 @@ Quasar is an MIT licensed open-source Vue.js based framework, which allows you as a web developer to quickly create responsive websites/apps in many flavours. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/quasar-cli:2": {} + "ghcr.io/devcontainers-extra/features/quasar-cli:2": {} } ``` @@ -18,3 +18,7 @@ Quasar is an MIT licensed open-source Vue.js based framework, which allows you a | version | Select the version of Quasar CLI to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/raku-asdf/README.md b/src/raku-asdf/README.md index 7d46ff547..64d93869c 100644 --- a/src/raku-asdf/README.md +++ b/src/raku-asdf/README.md @@ -3,11 +3,11 @@ Raku is a general-purpose, interpreted, dynamic programming language. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/raku-asdf:1": {} + "ghcr.io/devcontainers-extra/features/raku-asdf:1": {} } ``` @@ -18,3 +18,7 @@ Raku is a general-purpose, interpreted, dynamic programming language. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/rclone/README.md b/src/rclone/README.md index 57a265b2d..6b0893924 100644 --- a/src/rclone/README.md +++ b/src/rclone/README.md @@ -3,11 +3,11 @@ Rclone ('rsync for cloud storage') is a command-line program to sync files and directories to and from different cloud storage providers. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/rclone:1": {} + "ghcr.io/devcontainers-extra/features/rclone:1": {} } ``` @@ -18,3 +18,7 @@ Rclone ('rsync for cloud storage') is a command-line program to sync files and d | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/redis-homebrew/README.md b/src/redis-homebrew/README.md index 089958034..2a60306b6 100644 --- a/src/redis-homebrew/README.md +++ b/src/redis-homebrew/README.md @@ -3,11 +3,11 @@ Redis is an in-memory data structure store, used as a distributed, in-memory key-value database, cache and message broker, with optional durability. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/redis-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/redis-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ Redis is an in-memory data structure store, used as a distributed, in-memory key | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/rekor-cli/README.md b/src/rekor-cli/README.md index 13ef3e602..780e0b9d5 100644 --- a/src/rekor-cli/README.md +++ b/src/rekor-cli/README.md @@ -3,11 +3,11 @@ Rekor provide an immutable, tamper-resistant ledger of metadata generated within a software project supply chain. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/rekor-cli:1": {} + "ghcr.io/devcontainers-extra/features/rekor-cli:1": {} } ``` @@ -18,3 +18,7 @@ Rekor provide an immutable, tamper-resistant ledger of metadata generated within | version | Select the version to install. 
| string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/renovate-cli/README.md b/src/renovate-cli/README.md index 1fae77b04..50641c101 100644 --- a/src/renovate-cli/README.md +++ b/src/renovate-cli/README.md @@ -3,11 +3,11 @@ Renovate is a universal dependency update tool. Multi-platform and multi-language. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/renovate-cli:2": {} + "ghcr.io/devcontainers-extra/features/renovate-cli:2": {} } ``` @@ -18,3 +18,7 @@ Renovate is a universal dependency update tool. Multi-platform and multi-languag | version | Select the version of Renovate CLI to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ripgrep/README.md b/src/ripgrep/README.md index 4e876d7f3..9355c327e 100644 --- a/src/ripgrep/README.md +++ b/src/ripgrep/README.md @@ -3,11 +3,11 @@ ripgrep recursively searches directories for a regex pattern while respecting your gitignore. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ripgrep:1": {} + "ghcr.io/devcontainers-extra/features/ripgrep:1": {} } ``` @@ -18,3 +18,7 @@ ripgrep recursively searches directories for a regex pattern while respecting yo | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/rollup/README.md b/src/rollup/README.md index f63f79d98..52497f841 100644 --- a/src/rollup/README.md +++ b/src/rollup/README.md @@ -3,11 +3,11 @@ Rollup is a module bundler for JavaScript which compiles small pieces of code into something larger and more complex, such as a library or application. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/rollup:2": {} + "ghcr.io/devcontainers-extra/features/rollup:2": {} } ``` @@ -18,3 +18,7 @@ Rollup is a module bundler for JavaScript which compiles small pieces of code in | version | Select the version of rollup.js to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ruby-asdf/README.md b/src/ruby-asdf/README.md index 40bc0bbf1..9c4d0f247 100644 --- a/src/ruby-asdf/README.md +++ b/src/ruby-asdf/README.md @@ -3,11 +3,11 @@ Installs Ruby via asdf. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ruby-asdf:0": {} + "ghcr.io/devcontainers-extra/features/ruby-asdf:0": {} } ``` @@ -18,3 +18,7 @@ Installs Ruby via asdf. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ruff/README.md b/src/ruff/README.md index 3393baefb..6c052cea7 100644 --- a/src/ruff/README.md +++ b/src/ruff/README.md @@ -3,11 +3,11 @@ Ruff is an extremely fast Python linter, written in Rust. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ruff:1": {} + "ghcr.io/devcontainers-extra/features/ruff:1": {} } ``` @@ -18,3 +18,7 @@ Ruff is an extremely fast Python linter, written in Rust. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/salesforce-cli/README.md b/src/salesforce-cli/README.md index 8d8230794..ac6e291a0 100644 --- a/src/salesforce-cli/README.md +++ b/src/salesforce-cli/README.md @@ -3,11 +3,11 @@ The Salesforce CLI is a powerful command line interface that simplifies development and build automation when working with your Salesforce org. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/salesforce-cli:1": {} + "ghcr.io/devcontainers-extra/features/salesforce-cli:1": {} } ``` @@ -18,3 +18,7 @@ The Salesforce CLI is a powerful command line interface that simplifies developm | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/salesforce-sfdx/README.md b/src/salesforce-sfdx/README.md index ee0ea7d49..ea65b568c 100644 --- a/src/salesforce-sfdx/README.md +++ b/src/salesforce-sfdx/README.md @@ -3,11 +3,11 @@ sfdx provide you with the ability to develop and test your apps more easily on Salesforce Platform. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/salesforce-sfdx:1": {} + "ghcr.io/devcontainers-extra/features/salesforce-sfdx:1": {} } ``` @@ -18,3 +18,7 @@ sfdx provide you with the ability to develop and test your apps more easily on S | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/sanity-cli/README.md b/src/sanity-cli/README.md index dfee0173c..517579f1f 100644 --- a/src/sanity-cli/README.md +++ b/src/sanity-cli/README.md @@ -3,11 +3,11 @@ The sanity Command Line Interface (CLI) is a collection of tools for managing, developing, debugging, and deploying your Sanity Studio projects. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/sanity-cli:1": {} + "ghcr.io/devcontainers-extra/features/sanity-cli:1": {} } ``` @@ -18,3 +18,7 @@ The sanity Command Line Interface (CLI) is a collection of tools for managing, d | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/sap-piper/README.md b/src/sap-piper/README.md index aba9e90bc..8ae8fe9f0 100644 --- a/src/sap-piper/README.md +++ b/src/sap-piper/README.md @@ -3,11 +3,11 @@ Piper offers default pipelines to easily implement CI/CD processes integrating SAP systems. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/sap-piper:1": {} + "ghcr.io/devcontainers-extra/features/sap-piper:1": {} } ``` @@ -18,3 +18,7 @@ Piper offers default pipelines to easily implement CI/CD processes integrating S | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/sbt-sdkman/README.md b/src/sbt-sdkman/README.md index 70b627740..551088f95 100644 --- a/src/sbt-sdkman/README.md +++ b/src/sbt-sdkman/README.md @@ -9,11 +9,11 @@ repositories); continuous compilation, testing, and deployment; integration with the Scala interpreter for rapid iteration and debugging; support for mixed Java/Scala projects -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/sbt-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/sbt-sdkman:2": {} } ``` @@ -26,3 +26,7 @@ Java/Scala projects | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/scala-sdkman/README.md b/src/scala-sdkman/README.md index 68c12cd42..a62164fcb 100644 --- a/src/scala-sdkman/README.md +++ b/src/scala-sdkman/README.md @@ -15,11 +15,11 @@ contravariance, higher-order types, and anonymous types. Other features of Scala include operator overloading, optional parameters, named parameters, raw strings, and no checked exceptions. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/scala-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/scala-sdkman:2": {} } ``` @@ -32,3 +32,7 @@ strings, and no checked exceptions. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/scalacli-sdkman/README.md b/src/scalacli-sdkman/README.md index 009264f96..ad22695a1 100644 --- a/src/scalacli-sdkman/README.md +++ b/src/scalacli-sdkman/README.md @@ -4,11 +4,11 @@ Scala CLI is a command-line tool to interact with the Scala language. It lets you compile, run, test, and package your Scala code (and more!) -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/scalacli-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/scalacli-sdkman:2": {} } ``` @@ -21,3 +21,7 @@ you compile, run, test, and package your Scala code (and more!) | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/scancode-toolkit/README.md b/src/scancode-toolkit/README.md index c06ce5c4e..d94bfa165 100644 --- a/src/scancode-toolkit/README.md +++ b/src/scancode-toolkit/README.md @@ -3,11 +3,11 @@ ScanCode scan code to detect packages and dependencies, licenses, copyrights and more. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/scancode-toolkit:1": {} + "ghcr.io/devcontainers-extra/features/scancode-toolkit:1": {} } ``` @@ -18,3 +18,7 @@ ScanCode scan code to detect packages and dependencies, licenses, copyrights and | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/schemacrawler-sdkman/README.md b/src/schemacrawler-sdkman/README.md index 5f940ccdb..dbaa2018e 100644 --- a/src/schemacrawler-sdkman/README.md +++ b/src/schemacrawler-sdkman/README.md @@ -10,11 +10,11 @@ SchemaCrawler also generates schema diagrams. You can execute scripts in any standard scripting language against your database. You can find potential schema design issues with lint. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/schemacrawler-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/schemacrawler-sdkman:2": {} } ``` @@ -27,3 +27,7 @@ design issues with lint. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/sentinel-asdf/README.md b/src/sentinel-asdf/README.md index c90134921..93e7c5ef9 100644 --- a/src/sentinel-asdf/README.md +++ b/src/sentinel-asdf/README.md @@ -3,11 +3,11 @@ Sentinel is an embeddable policy as code framework to enable fine-grained, logic-based policy decisions that can be extended to source external information to make decisions. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/sentinel-asdf:2": {} + "ghcr.io/devcontainers-extra/features/sentinel-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Sentinel is an embeddable policy as code framework to enable fine-grained, logic | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/serf-asdf/README.md b/src/serf-asdf/README.md index 73c10fa62..7e3233bea 100644 --- a/src/serf-asdf/README.md +++ b/src/serf-asdf/README.md @@ -3,11 +3,11 @@ Serf is a decentralized solution for cluster membership, failure detection, and orchestration. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/serf-asdf:2": {} + "ghcr.io/devcontainers-extra/features/serf-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Serf is a decentralized solution for cluster membership, failure detection, and | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/shfmt/README.md b/src/shfmt/README.md index 2a659095f..a78737aaf 100644 --- a/src/shfmt/README.md +++ b/src/shfmt/README.md @@ -3,11 +3,11 @@ Shfmt is a shell parser, formatter, and interpreter. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/shfmt:1.0.0": {} + "ghcr.io/devcontainers-extra/features/shfmt:1": {} } ``` @@ -16,3 +16,9 @@ Shfmt is a shell parser, formatter, and interpreter. 
| Options Id | Description | Type | Default Value | |-----|-----|-----|-----| | version | Select the version to install. | string | latest | + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/shopify-cli/README.md b/src/shopify-cli/README.md index d47b694ba..b7a65679d 100644 --- a/src/shopify-cli/README.md +++ b/src/shopify-cli/README.md @@ -3,11 +3,11 @@ Shopify CLI is a command-line interface tool that helps you build Shopify apps and themes. It quickly generates Shopify apps, themes, and custom storefronts. You can also use it to automate many common development tasks. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/shopify-cli:1": {} + "ghcr.io/devcontainers-extra/features/shopify-cli:1": {} } ``` @@ -18,3 +18,7 @@ Shopify CLI is a command-line interface tool that helps you build Shopify apps a | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/sigstore-python/README.md b/src/sigstore-python/README.md index fec02622d..1c68aa0d0 100644 --- a/src/sigstore-python/README.md +++ b/src/sigstore-python/README.md @@ -3,11 +3,11 @@ sigstore-python is a Python tool for generating and verifying Sigstore signatures. You can use it to sign and verify Python package distributions, or anything else! -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/sigstore-python:1": {} + "ghcr.io/devcontainers-extra/features/sigstore-python:1": {} } ``` @@ -18,3 +18,7 @@ sigstore-python is a Python tool for generating and verifying Sigstore signature | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/snyk-cli/README.md b/src/snyk-cli/README.md index 87c405af3..fc82e1a94 100644 --- a/src/snyk-cli/README.md +++ b/src/snyk-cli/README.md @@ -3,11 +3,11 @@ Snyk CLI scans and monitors your projects for security vulnerabilities. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/snyk-cli:1": {} + "ghcr.io/devcontainers-extra/features/snyk-cli:1": {} } ``` @@ -18,3 +18,7 @@ Snyk CLI scans and monitors your projects for security vulnerabilities. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/sops/README.md b/src/sops/README.md index 7e86edde1..e06bc8b6e 100644 --- a/src/sops/README.md +++ b/src/sops/README.md @@ -3,11 +3,11 @@ sops is an editor of encrypted files that supports YAML, JSON, ENV, INI and BINARY formats and encrypts with AWS KMS, GCP KMS, Azure Key Vault, age, and PGP. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/sops:1": {} + "ghcr.io/devcontainers-extra/features/sops:1": {} } ``` @@ -18,3 +18,7 @@ sops is an editor of encrypted files that supports YAML, JSON, ENV, INI and BINA | version | Select the version to install. 
| string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/spacectl/README.md b/src/spacectl/README.md index 9e5b3caf7..3074f76a3 100644 --- a/src/spacectl/README.md +++ b/src/spacectl/README.md @@ -3,11 +3,11 @@ spacectl is a utility wrapping Spacelift's GraphQL API for easy programmatic access in command-line contexts - either in manual interactive mode (in your local shell), or in a predefined CI pipeline (GitHub actions, CircleCI, Jenkins etc). -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/spacectl:1": {} + "ghcr.io/devcontainers-extra/features/spacectl:1": {} } ``` @@ -18,3 +18,7 @@ spacectl is a utility wrapping Spacelift's GraphQL API for easy programmatic acc | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/spark-sdkman/README.md b/src/spark-sdkman/README.md index faed9b419..7185e7c30 100644 --- a/src/spark-sdkman/README.md +++ b/src/spark-sdkman/README.md @@ -5,11 +5,11 @@ Apache Spark is an open-source cluster-computing framework. Spark provides an interface for programming entire clusters with implicit data parallelism and fault-tolerance. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/spark-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/spark-sdkman:2": {} } ``` @@ -22,3 +22,7 @@ fault-tolerance. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/spicedb/README.md b/src/spicedb/README.md index bc3a4679e..c4cadfd81 100644 --- a/src/spicedb/README.md +++ b/src/spicedb/README.md @@ -3,11 +3,11 @@ SpiceDB is an open source, Google Zanzibar-inspired, database system for creating and managing security-critical application permissions. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/spicedb:1": {} + "ghcr.io/devcontainers-extra/features/spicedb:1": {} } ``` @@ -18,3 +18,7 @@ SpiceDB is an open source, Google Zanzibar-inspired, database system for creatin | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/springboot-sdkman/README.md b/src/springboot-sdkman/README.md index 41b23daea..5ed58619e 100644 --- a/src/springboot-sdkman/README.md +++ b/src/springboot-sdkman/README.md @@ -5,11 +5,11 @@ Spring Boot takes an opinionated view of building production-ready Spring applications. It favors convention over configuration and is designed to get you up and running as quickly as possible. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/springboot-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/springboot-sdkman:2": {} } ``` @@ -22,3 +22,7 @@ up and running as quickly as possible. 
| jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/sqlfluff/README.md b/src/sqlfluff/README.md index 519f416dc..b0555e4c3 100644 --- a/src/sqlfluff/README.md +++ b/src/sqlfluff/README.md @@ -3,11 +3,11 @@ Fluff is an extensible and modular linter designed to help you write good SQL and catch errors and bad SQL before it hits your database. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/sqlfluff:1": {} + "ghcr.io/devcontainers-extra/features/sqlfluff:1": {} } ``` @@ -19,3 +19,7 @@ Fluff is an extensible and modular linter designed to help you write good SQL an | plugins | A space delimitered list of sqlfluff plugins (will be injected into the sqlfluff pipx env). See proposals for examples. | string | - | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/squarespace-server/README.md b/src/squarespace-server/README.md index b37d82878..1bc195e32 100644 --- a/src/squarespace-server/README.md +++ b/src/squarespace-server/README.md @@ -3,11 +3,11 @@ The Squarespace local development server is a command line tool that sets up a test server on your computer, allowing you to see changes to your template before making them live -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/squarespace-server:1": {} + "ghcr.io/devcontainers-extra/features/squarespace-server:1": {} } ``` @@ -18,3 +18,7 @@ The Squarespace local development server is a command line tool that sets up a t | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/sshoogr-sdkman/README.md b/src/sshoogr-sdkman/README.md index 0ee30beea..007ff00a4 100644 --- a/src/sshoogr-sdkman/README.md +++ b/src/sshoogr-sdkman/README.md @@ -4,11 +4,11 @@ Sshoogr is a Groovy based DSL and command line tool for working with remote servers through SSH. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/sshoogr-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/sshoogr-sdkman:2": {} } ``` @@ -21,3 +21,7 @@ servers through SSH. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/starship-homebrew/README.md b/src/starship-homebrew/README.md index b7607bf5f..28c62ae48 100644 --- a/src/starship-homebrew/README.md +++ b/src/starship-homebrew/README.md @@ -3,11 +3,11 @@ Starship is fast and highly customizable cross-shell prompt that can display contextual information. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/starship-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/starship-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ Starship is fast and highly customizable cross-shell prompt that can display con | version | Select the version to install. 
| string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/starship/README.md b/src/starship/README.md index de1d78e56..9ecc4d65a 100644 --- a/src/starship/README.md +++ b/src/starship/README.md @@ -3,11 +3,11 @@ Starship is fast and highly customizable cross-shell prompt that can display contextual information. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/starship:1": {} + "ghcr.io/devcontainers-extra/features/starship:1": {} } ``` @@ -18,3 +18,7 @@ Starship is fast and highly customizable cross-shell prompt that can display con | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/statsd-exporter/README.md b/src/statsd-exporter/README.md index ad2a4f463..40dc3129d 100644 --- a/src/statsd-exporter/README.md +++ b/src/statsd-exporter/README.md @@ -3,11 +3,11 @@ The StatsD exporter is a drop-in replacement for StatsD. This exporter translates StatsD metrics to Prometheus metrics via configured mapping rules. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/statsd-exporter:1": {} + "ghcr.io/devcontainers-extra/features/statsd-exporter:1": {} } ``` @@ -18,3 +18,7 @@ The StatsD exporter is a drop-in replacement for StatsD. This exporter translate | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/stew/README.md b/src/stew/README.md index ab20d689c..af12524b5 100644 --- a/src/stew/README.md +++ b/src/stew/README.md @@ -3,11 +3,11 @@ stew is an independent package manager for compiled binaries. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/stew:1": {} + "ghcr.io/devcontainers-extra/features/stew:1": {} } ``` @@ -18,3 +18,7 @@ stew is an independent package manager for compiled binaries. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/supabase-cli/README.md b/src/supabase-cli/README.md index f0e68f807..b6211d0b6 100644 --- a/src/supabase-cli/README.md +++ b/src/supabase-cli/README.md @@ -3,11 +3,11 @@ The Supabase CLI provides tools to develop your project locally and deploy to the Supabase Platform. You can also use the CLI to manage your Supabase projects, handle database migrations and CI/CD workflows, and generate types directly from your database schema. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/supabase-cli:1": {} + "ghcr.io/devcontainers-extra/features/supabase-cli:1": {} } ``` @@ -18,3 +18,7 @@ The Supabase CLI provides tools to develop your project locally and deploy to th | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/surge-cli/README.md b/src/surge-cli/README.md index 6542fec06..005ff7086 100644 --- a/src/surge-cli/README.md +++ b/src/surge-cli/README.md @@ -3,11 +3,11 @@ Publish HTML, CSS, and JS for free, without leaving the command line. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/surge-cli:1": {} + "ghcr.io/devcontainers-extra/features/surge-cli:1": {} } ``` @@ -18,3 +18,7 @@ Publish HTML, CSS, and JS for free, without leaving the command line. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/sv2v/README.md b/src/sv2v/README.md index e924b3e88..dfd7a2acd 100644 --- a/src/sv2v/README.md +++ b/src/sv2v/README.md @@ -3,11 +3,11 @@ sv2v converts SystemVerilog (IEEE 1800-2017) to Verilog (IEEE 1364-2005), with an emphasis on supporting synthesizable language constructs. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/sv2v:1": {} + "ghcr.io/devcontainers-extra/features/sv2v:1": {} } ``` @@ -18,3 +18,7 @@ sv2v converts SystemVerilog (IEEE 1800-2017) to Verilog (IEEE 1364-2005), with a | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/svu-asdf/README.md b/src/svu-asdf/README.md index 155843916..d735237a5 100644 --- a/src/svu-asdf/README.md +++ b/src/svu-asdf/README.md @@ -3,11 +3,11 @@ svu is a tool to manage semantic versions at ease. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/svu-asdf:2": {} + "ghcr.io/devcontainers-extra/features/svu-asdf:2": {} } ``` @@ -18,3 +18,7 @@ svu is a tool to manage semantic versions at ease. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/syft/README.md b/src/syft/README.md index 41445d86e..ef7ec5770 100644 --- a/src/syft/README.md +++ b/src/syft/README.md @@ -3,11 +3,11 @@ Syft is A CLI tool and Go library for generating a Software Bill of Materials (SBOM) from container images and filesystems. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/syft:1": {} + "ghcr.io/devcontainers-extra/features/syft:1": {} } ``` @@ -18,3 +18,7 @@ Syft is A CLI tool and Go library for generating a Software Bill of Materials (S | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/syntaqx-serve/README.md b/src/syntaqx-serve/README.md index e755f2cb3..566d872a3 100644 --- a/src/syntaqx-serve/README.md +++ b/src/syntaqx-serve/README.md @@ -3,11 +3,11 @@ serve is a static http server anywhere you need one. It's basically python -m SimpleHTTPServer 8080 written in Go, because who can remember that many letters? 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/syntaqx-serve:1": {} + "ghcr.io/devcontainers-extra/features/syntaqx-serve:1": {} } ``` @@ -18,3 +18,7 @@ serve is a static http server anywhere you need one. It's basically python -m Si | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/tailscale/README.md b/src/tailscale/README.md index 1fc27e6b9..7d3a05875 100644 --- a/src/tailscale/README.md +++ b/src/tailscale/README.md @@ -3,13 +3,22 @@ Tailscale is a VPN service that makes the devices and applications you own accessible anywhere in the world, securely and effortlessly. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tailscale:1": {} + "ghcr.io/devcontainers-extra/features/tailscale:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/taxi-sdkman/README.md b/src/taxi-sdkman/README.md index 3498737e0..eb68c5b5f 100644 --- a/src/taxi-sdkman/README.md +++ b/src/taxi-sdkman/README.md @@ -5,11 +5,11 @@ Taxi is a language for documenting data - such as data models - and the contracts of APIs. It describes data semantically, allowing powerful tooling to discover and map data based on it's meaning, rather than the name of a field. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/taxi-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/taxi-sdkman:2": {} } ``` @@ -22,3 +22,7 @@ discover and map data based on it's meaning, rather than the name of a field. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/tea/README.md b/src/tea/README.md index 26862af0d..da17b3dbc 100644 --- a/src/tea/README.md +++ b/src/tea/README.md @@ -3,11 +3,11 @@ tea is the next-generation, cross-platform package manager from the creator of brew. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tea:1": {} + "ghcr.io/devcontainers-extra/features/tea:1": {} } ``` @@ -18,3 +18,7 @@ tea is the next-generation, cross-platform package manager from the creator of b | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/tekton-cli/README.md b/src/tekton-cli/README.md index c277355d8..086d03fac 100644 --- a/src/tekton-cli/README.md +++ b/src/tekton-cli/README.md @@ -3,11 +3,11 @@ The Tekton Pipelines CLI project provides a command-line interface (CLI) for interacting with Tekton, an open-source framework for Continuous Integration and Delivery (CI/CD) systems. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tekton-cli:1": {} + "ghcr.io/devcontainers-extra/features/tekton-cli:1": {} } ``` @@ -18,3 +18,7 @@ The Tekton Pipelines CLI project provides a command-line interface (CLI) for int | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/tempo/README.md b/src/tempo/README.md index 01cd4ce62..6cd5c042b 100644 --- a/src/tempo/README.md +++ b/src/tempo/README.md @@ -3,11 +3,11 @@ Grafana Tempo is an open source, easy-to-use and high-scale distributed tracing backend. Tempo is cost-efficient, requiring only object storage to operate, and is deeply integrated with Grafana, Prometheus, and Loki. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tempo:1": {} + "ghcr.io/devcontainers-extra/features/tempo:1": {} } ``` @@ -18,3 +18,7 @@ Grafana Tempo is an open source, easy-to-use and high-scale distributed tracing | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/temporal-cli/README.md b/src/temporal-cli/README.md index 01940fce6..f038b3fdd 100644 --- a/src/temporal-cli/README.md +++ b/src/temporal-cli/README.md @@ -3,11 +3,11 @@ Temporal CLI is a Command-line interface for running Temporal Server and interacting with Workflows, Activities, Namespaces, and other parts of Temporal. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/temporal-cli:1": {} + "ghcr.io/devcontainers-extra/features/temporal-cli:1": {} } ``` @@ -18,3 +18,7 @@ Temporal CLI is a Command-line interface for running Temporal Server and interac | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/terracognita/README.md b/src/terracognita/README.md index 9cb095556..3f52b8a67 100644 --- a/src/terracognita/README.md +++ b/src/terracognita/README.md @@ -3,11 +3,11 @@ TerraCognita is open-source software that quickly and automatically creates Terraform from all of your manually-provisioned resources. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/terracognita:1": {} + "ghcr.io/devcontainers-extra/features/terracognita:1": {} } ``` @@ -18,3 +18,7 @@ TerraCognita is open-source software that quickly and automatically creates Terr | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/terraform-asdf/README.md b/src/terraform-asdf/README.md index f1ab7b00e..f749b9fb3 100644 --- a/src/terraform-asdf/README.md +++ b/src/terraform-asdf/README.md @@ -3,11 +3,11 @@ Terraform is an open-source infrastructure as code software tool that enables you to safely and predictably create, change, and improve infrastructure. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/terraform-asdf:2": {} + "ghcr.io/devcontainers-extra/features/terraform-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Terraform is an open-source infrastructure as code software tool that enables yo | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/terraform-docs/README.md b/src/terraform-docs/README.md index 630f23c68..93ebddf6d 100644 --- a/src/terraform-docs/README.md +++ b/src/terraform-docs/README.md @@ -3,11 +3,11 @@ terraform-docs is a utility to generate documentation from Terraform modules in various output formats. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/terraform-docs:1": {} + "ghcr.io/devcontainers-extra/features/terraform-docs:1": {} } ``` @@ -18,3 +18,7 @@ terraform-docs is a utility to generate documentation from Terraform modules in | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/terraform-ls-asdf/README.md b/src/terraform-ls-asdf/README.md index 390d7bbc0..b45d4c084 100644 --- a/src/terraform-ls-asdf/README.md +++ b/src/terraform-ls-asdf/README.md @@ -3,11 +3,11 @@ The official Terraform language server (terraform-ls) maintained by HashiCorp provides IDE features to any LSP-compatible editor. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/terraform-ls-asdf:2": {} + "ghcr.io/devcontainers-extra/features/terraform-ls-asdf:2": {} } ``` @@ -18,3 +18,7 @@ The official Terraform language server (terraform-ls) maintained by HashiCorp pr | version | Select the version of Terraform Language Server (terraform-ls) to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/terraformer/README.md b/src/terraformer/README.md index 07898cde0..10bcffbdd 100644 --- a/src/terraformer/README.md +++ b/src/terraformer/README.md @@ -3,11 +3,11 @@ Terraformer is a CLI tool to generate terraform files from existing infrastructure (reverse Terraform). -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/terraformer:1": {} + "ghcr.io/devcontainers-extra/features/terraformer:1": {} } ``` @@ -18,3 +18,7 @@ Terraformer is a CLI tool to generate terraform files from existing infrastructu | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/terragrunt/README.md b/src/terragrunt/README.md index bdae0c84f..266189784 100644 --- a/src/terragrunt/README.md +++ b/src/terragrunt/README.md @@ -3,11 +3,11 @@ Terragrunt is a thin wrapper that provides extra tools for keeping your configurations DRY, working with multiple Terraform modules, and managing remote state. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/terragrunt:1": {} + "ghcr.io/devcontainers-extra/features/terragrunt:1": {} } ``` @@ -18,3 +18,7 @@ Terragrunt is a thin wrapper that provides extra tools for keeping your configur | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/terramate/README.md b/src/terramate/README.md index 44a08461e..6303b69a5 100644 --- a/src/terramate/README.md +++ b/src/terramate/README.md @@ -3,11 +3,11 @@ Terramate adds powerful capabilities such as code generation, stacks, orchestration, change detection, data sharing and more to Terraform. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/terramate:1": {} + "ghcr.io/devcontainers-extra/features/terramate:1": {} } ``` @@ -18,3 +18,7 @@ Terramate adds powerful capabilities such as code generation, stacks, orchestrat | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/terrascan/README.md b/src/terrascan/README.md index 7ce2acc65..f46251a44 100644 --- a/src/terrascan/README.md +++ b/src/terrascan/README.md @@ -7,7 +7,7 @@ Terrascan is a static code analyzer for Infrastructure as Code. ```json "features": { - "ghcr.io/devcontainers-contrib/features/terrascan:1": {} + "ghcr.io/devcontainers-extra/features/terrascan:1": {} } ``` @@ -21,4 +21,4 @@ Terrascan is a static code analyzer for Infrastructure as Code. --- -_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers-contrib/features/blob/main/src/terrascan/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/tfc-agent-asdf/README.md b/src/tfc-agent-asdf/README.md index c2f2a2c16..5e6e0cfe8 100644 --- a/src/tfc-agent-asdf/README.md +++ b/src/tfc-agent-asdf/README.md @@ -3,11 +3,11 @@ Installs tfc-agent -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tfc-agent-asdf:2": {} + "ghcr.io/devcontainers-extra/features/tfc-agent-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Installs tfc-agent | version | Select the version of tfc-agent to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/tfcdk-cli/README.md b/src/tfcdk-cli/README.md index 84a58c677..e059e642f 100644 --- a/src/tfcdk-cli/README.md +++ b/src/tfcdk-cli/README.md @@ -3,11 +3,11 @@ Cloud Development Kit for Terraform (CDKTF) allows you to use familiar programming languages to define and provision infrastructure. This gives you access to the entire Terraform ecosystem without learning HashiCorp Configuration Language (HCL) and lets you leverage the power of your existing toolchain for testing, dependency management, etc. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tfcdk-cli:1": {} + "ghcr.io/devcontainers-extra/features/tfcdk-cli:1": {} } ``` @@ -18,3 +18,7 @@ Cloud Development Kit for Terraform (CDKTF) allows you to use familiar programmi | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/tfenv-homebrew/README.md b/src/tfenv-homebrew/README.md index 554a45a9a..a9de652c8 100644 --- a/src/tfenv-homebrew/README.md +++ b/src/tfenv-homebrew/README.md @@ -3,11 +3,11 @@ tfenv is an open-source Terraform version manager tool. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tfenv-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/tfenv-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ tfenv is an open-source Terraform version manager tool. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/tfsec/README.md b/src/tfsec/README.md index 453228a97..515d56aba 100644 --- a/src/tfsec/README.md +++ b/src/tfsec/README.md @@ -3,11 +3,11 @@ tfsec is a static analysis security scanner for your Terraform code. Designed to run locally and in your CI pipelines, developer-friendly output and fully documented checks mean detection and remediation can take place as quickly and efficiently as possible -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tfsec:1": {} + "ghcr.io/devcontainers-extra/features/tfsec:1": {} } ``` @@ -18,3 +18,7 @@ tfsec is a static analysis security scanner for your Terraform code. Designed to | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/tfswitch/README.md b/src/tfswitch/README.md index 0b11df47b..392c64d6a 100644 --- a/src/tfswitch/README.md +++ b/src/tfswitch/README.md @@ -3,13 +3,22 @@ The tfswitch command line tool lets you switch between different versions of terraform. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tfswitch:1": {} + "ghcr.io/devcontainers-extra/features/tfswitch:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/tldr/README.md b/src/tldr/README.md index e62bc1031..a0a979b29 100644 --- a/src/tldr/README.md +++ b/src/tldr/README.md @@ -3,11 +3,11 @@ The tldr pages are a community effort to simplify the beloved man pages with practical examples. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tldr:2": {} + "ghcr.io/devcontainers-extra/features/tldr:2": {} } ``` @@ -18,3 +18,7 @@ The tldr pages are a community effort to simplify the beloved man pages with pra | version | Select the version to install. 
| string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/tmate/README.md b/src/tmate/README.md index d6917c386..349c391e5 100644 --- a/src/tmate/README.md +++ b/src/tmate/README.md @@ -3,13 +3,22 @@ tmate is a terminal multiplexer with instant terminal sharing. It enables a number of terminals to be created, accessed, and controlled from a single screen and be shared with another mates. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tmate:1": {} + "ghcr.io/devcontainers-extra/features/tmate:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/tmux-apt-get/README.md b/src/tmux-apt-get/README.md index 40b6e1780..47026e4c5 100644 --- a/src/tmux-apt-get/README.md +++ b/src/tmux-apt-get/README.md @@ -3,13 +3,22 @@ tmux is a terminal multiplexer: it enables a number of terminals to be created, accessed, and controlled from a single screen. tmux may be detached from a screen and continue running in the background, then later reattached. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tmux-apt-get:1": {} + "ghcr.io/devcontainers-extra/features/tmux-apt-get:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/tmux-homebrew/README.md b/src/tmux-homebrew/README.md index 39101fba1..b6d29e70e 100644 --- a/src/tmux-homebrew/README.md +++ b/src/tmux-homebrew/README.md @@ -3,11 +3,11 @@ tmux is a terminal multiplexer: it enables a number of terminals to be created, accessed, and controlled from a single screen. tmux may be detached from a screen and continue running in the background, then later reattached. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tmux-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/tmux-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ tmux is a terminal multiplexer: it enables a number of terminals to be created, | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/tomcat-sdkman/README.md b/src/tomcat-sdkman/README.md index d31832c29..114460e4d 100644 --- a/src/tomcat-sdkman/README.md +++ b/src/tomcat-sdkman/README.md @@ -5,11 +5,11 @@ The Apache Tomcat software is an open source implementation of the Java Servlet, JavaServer Pages, Java Expression Language and Java WebSocket technologies. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tomcat-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/tomcat-sdkman:2": {} } ``` @@ -22,3 +22,7 @@ technologies. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/tooljet-cli/README.md b/src/tooljet-cli/README.md index 2500625ae..b38ae7913 100644 --- a/src/tooljet-cli/README.md +++ b/src/tooljet-cli/README.md @@ -3,11 +3,11 @@ ToolJet is an open-source low-code framework to build and deploy internal tools quickly with minimal engineering effort. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tooljet-cli:1": {} + "ghcr.io/devcontainers-extra/features/tooljet-cli:1": {} } ``` @@ -18,3 +18,7 @@ ToolJet is an open-source low-code framework to build and deploy internal tools | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/toolkit-sdkman/README.md b/src/toolkit-sdkman/README.md index b9012c816..385a2c1a0 100644 --- a/src/toolkit-sdkman/README.md +++ b/src/toolkit-sdkman/README.md @@ -4,11 +4,11 @@ Toolkit is a client command line tool that supports multiple IoT protocols like MQTT and COAP. It provides a command line client tool for IoT development. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/toolkit-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/toolkit-sdkman:2": {} } ``` @@ -21,3 +21,7 @@ MQTT and COAP. It provides a command line client tool for IoT development. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/tox/README.md b/src/tox/README.md index e078b0fbd..3d79f55e7 100644 --- a/src/tox/README.md +++ b/src/tox/README.md @@ -3,11 +3,11 @@ tox is a generic virtual environment management and test command line tool. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tox:2": {} + "ghcr.io/devcontainers-extra/features/tox:2": {} } ``` @@ -18,3 +18,7 @@ tox is a generic virtual environment management and test command line tool. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/trello-cli/README.md b/src/trello-cli/README.md index 8661597d3..0763ae64d 100644 --- a/src/trello-cli/README.md +++ b/src/trello-cli/README.md @@ -3,11 +3,11 @@ trello-cli is a CLI tool for Trello. Makes sense, right? -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/trello-cli:1": {} + "ghcr.io/devcontainers-extra/features/trello-cli:1": {} } ``` @@ -18,3 +18,7 @@ trello-cli is a CLI tool for Trello. Makes sense, right? | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/tridentctl-asdf/README.md b/src/tridentctl-asdf/README.md index 57ce3ef78..590523699 100644 --- a/src/tridentctl-asdf/README.md +++ b/src/tridentctl-asdf/README.md @@ -3,11 +3,11 @@ Installs tridentctl -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tridentctl-asdf:2": {} + "ghcr.io/devcontainers-extra/features/tridentctl-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Installs tridentctl | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/truffle/README.md b/src/truffle/README.md index 867d7850e..e8948f80e 100644 --- a/src/truffle/README.md +++ b/src/truffle/README.md @@ -3,11 +3,11 @@ Truffle is a development environment, testing framework, and asset pipeline for Ethereum, aiming to make life as an Ethereum developer easier. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/truffle:1": {} + "ghcr.io/devcontainers-extra/features/truffle:1": {} } ``` @@ -18,3 +18,7 @@ Truffle is a development environment, testing framework, and asset pipeline for | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ts-node/README.md b/src/ts-node/README.md index 7ed57fce1..6dd6d435a 100644 --- a/src/ts-node/README.md +++ b/src/ts-node/README.md @@ -3,11 +3,11 @@ ts-node is a TypeScript execution engine and REPL for Node.js. It JIT transforms TypeScript into JavaScript, enabling you to directly execute TypeScript on Node.js without precompiling. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ts-node:1": {} + "ghcr.io/devcontainers-extra/features/ts-node:1": {} } ``` @@ -18,3 +18,7 @@ ts-node is a TypeScript execution engine and REPL for Node.js. It JIT transforms | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/tsx/README.md b/src/tsx/README.md index 9bca14789..f1d8c2feb 100644 --- a/src/tsx/README.md +++ b/src/tsx/README.md @@ -3,11 +3,11 @@ tsx is a CLI command (alternative to node) for seamlessly running TypeScript & ESM, in both commonjs & module package types. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/tsx:1": {} + "ghcr.io/devcontainers-extra/features/tsx:1": {} } ``` @@ -18,3 +18,7 @@ tsx is a CLI command (alternative to node) for seamlessly running TypeScript & E | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/turborepo-npm/README.md b/src/turborepo-npm/README.md index c0a90ee80..462b352e2 100644 --- a/src/turborepo-npm/README.md +++ b/src/turborepo-npm/README.md @@ -3,11 +3,11 @@ Turborepo is a high-performance build system for JavaScript and TypeScript codebases. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/turborepo-npm:1": {} + "ghcr.io/devcontainers-extra/features/turborepo-npm:1": {} } ``` @@ -18,3 +18,7 @@ Turborepo is a high-performance build system for JavaScript and TypeScript codeb | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/twine/README.md b/src/twine/README.md index c95b5291f..ec6d3763e 100644 --- a/src/twine/README.md +++ b/src/twine/README.md @@ -3,11 +3,11 @@ Twine is a utility for publishing Python packages on PyPI. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/twine:2": {} + "ghcr.io/devcontainers-extra/features/twine:2": {} } ``` @@ -18,3 +18,7 @@ Twine is a utility for publishing Python packages on PyPI. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/typescript/README.md b/src/typescript/README.md index 0db6cc6c8..4f502fc98 100644 --- a/src/typescript/README.md +++ b/src/typescript/README.md @@ -3,11 +3,11 @@ TypeScript is a strongly typed programming language that builds on JavaScript, giving you better tooling at any scale. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/typescript:2": {} + "ghcr.io/devcontainers-extra/features/typescript:2": {} } ``` @@ -18,3 +18,7 @@ TypeScript is a strongly typed programming language that builds on JavaScript, g | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/typst/README.md b/src/typst/README.md index fd6490ee3..d0dd34ec5 100644 --- a/src/typst/README.md +++ b/src/typst/README.md @@ -3,11 +3,11 @@ Typst is a new markup-based typesetting system that is designed to be as powerful as LaTeX while being much easier to learn and use. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/typst:1": {} + "ghcr.io/devcontainers-extra/features/typst:1": {} } ``` @@ -18,3 +18,7 @@ Typst is a new markup-based typesetting system that is designed to be as powerfu | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/ufmt/README.md b/src/ufmt/README.md index fe86d8009..43e280d0e 100644 --- a/src/ufmt/README.md +++ b/src/ufmt/README.md @@ -3,11 +3,11 @@ µfmt is a safe, atomic code formatter for Python built on top of black and µsort. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ufmt:1.0.0": {} + "ghcr.io/devcontainers-extra/features/ufmt:1": {} } ``` @@ -18,3 +18,7 @@ | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/upx/README.md b/src/upx/README.md index 24b5c070d..0323ccdc1 100644 --- a/src/upx/README.md +++ b/src/upx/README.md @@ -3,11 +3,11 @@ UPX is a free, secure, portable, extendable, high-performance executable packer for several executable formats. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/upx:1": {} + "ghcr.io/devcontainers-extra/features/upx:1": {} } ``` @@ -18,3 +18,7 @@ UPX is a free, secure, portable, extendable, high-performance executable packer | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/vault-asdf/README.md b/src/vault-asdf/README.md index b1c246806..582290ed2 100644 --- a/src/vault-asdf/README.md +++ b/src/vault-asdf/README.md @@ -3,11 +3,11 @@ Vault secures, stores, and tightly controls access to tokens, passwords, certificates, API keys, and other secrets in modern computing. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/vault-asdf:2": {} + "ghcr.io/devcontainers-extra/features/vault-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Vault secures, stores, and tightly controls access to tokens, passwords, certifi | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/vercel-cli/README.md b/src/vercel-cli/README.md index 2cad8a7ef..bc39b49c6 100644 --- a/src/vercel-cli/README.md +++ b/src/vercel-cli/README.md @@ -3,11 +3,11 @@ With Vercel CLI command-line interface (CLI) you can interact with the Vercel platform using a terminal, or through an automated system, enabling you to retrieve logs, manage certificates, replicate your deployment environment locally, manage Domain Name System (DNS) records, and more. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/vercel-cli:1": {} + "ghcr.io/devcontainers-extra/features/vercel-cli:1": {} } ``` @@ -18,3 +18,7 @@ With Vercel CLI command-line interface (CLI) you can interact with the Vercel pl | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/vercel-ncc/README.md b/src/vercel-ncc/README.md index 75cf17a6b..72cf66eb6 100644 --- a/src/vercel-ncc/README.md +++ b/src/vercel-ncc/README.md @@ -3,11 +3,11 @@ ncc is a simple CLI for compiling a Node.js module into a single file, together with all its dependencies, gcc-style. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/vercel-ncc:1": {} + "ghcr.io/devcontainers-extra/features/vercel-ncc:1": {} } ``` @@ -18,3 +18,7 @@ ncc is a simple CLI for compiling a Node.js module into a single file, together | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/vercel-pkg/README.md b/src/vercel-pkg/README.md index 790e226aa..7cebd2d42 100644 --- a/src/vercel-pkg/README.md +++ b/src/vercel-pkg/README.md @@ -3,11 +3,11 @@ This command line interface enables you to package your Node.js project into an executable that can be run even on devices without Node.js installed. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/vercel-pkg:1": {} + "ghcr.io/devcontainers-extra/features/vercel-pkg:1": {} } ``` @@ -18,3 +18,7 @@ This command line interface enables you to package your Node.js project into an | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/vercel-release/README.md b/src/vercel-release/README.md index a7835cc0d..244c0b934 100644 --- a/src/vercel-release/README.md +++ b/src/vercel-release/README.md @@ -3,11 +3,11 @@ Release is a command line tool to automatically generate a new GitHub Release and populates it with the changes (commits) made since the last release. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/vercel-release:1": {} + "ghcr.io/devcontainers-extra/features/vercel-release:1": {} } ``` @@ -18,3 +18,7 @@ Release is a command line tool to automatically generate a new GitHub Release an | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/vercel-serve/README.md b/src/vercel-serve/README.md index 5baac3195..0bd9f0726 100644 --- a/src/vercel-serve/README.md +++ b/src/vercel-serve/README.md @@ -3,11 +3,11 @@ serve helps you serve a static site, single page application or just a static file (no matter if on your device or on the local network). -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/vercel-serve:1": {} + "ghcr.io/devcontainers-extra/features/vercel-serve:1": {} } ``` @@ -18,3 +18,7 @@ serve helps you serve a static site, single page application or just a static fi | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/vscode-cli/README.md b/src/vscode-cli/README.md index a44b25c94..5f8e393db 100644 --- a/src/vscode-cli/README.md +++ b/src/vscode-cli/README.md @@ -1,12 +1,13 @@ + # Visual Studio Code CLI (vscode-cli) -[Visual Studio Code CLI](https://code.visualstudio.com/docs/editor/command-line) (official Microsoft build), which manages the Visual Studio Code Server and `vscode.dev` tunnels. +Visual Studio Code CLI (official Microsoft build), which manages the Visual Studio Code Server and vscode.dev tunnels. 
## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/vscode-cli:1": {} + "ghcr.io/devcontainers-extra/features/vscode-cli:1": {} } ``` @@ -14,10 +15,10 @@ | Options Id | Description | Type | Default Value | |-----|-----|-----|-----| -| version | Specify the Visual Studio Code CLI version to install; or "latest"; or "commit:<hash>" | string | latest | +| version | Specify the Visual Studio Code CLI version to install; or "latest"; or "commit:" | string | latest | ---- -## Launch -Run `code serve-web` to download and run Visual Studio Code Server listening on port 8000; or run `code tunnel` to start a `vscode.dev` tunnel. Commands that require a full desktop installation of Visual Studio Code will not work. +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/vscode-server/README.md b/src/vscode-server/README.md index faca19c72..798493607 100644 --- a/src/vscode-server/README.md +++ b/src/vscode-server/README.md @@ -7,7 +7,7 @@ Visual Studio Code Server (official Microsoft build), which hosts Visual Studio ```json "features": { - "ghcr.io/devcontainers-contrib/features/vscode-server:1": {} + "ghcr.io/devcontainers-extra/features/vscode-server:1": {} } ``` @@ -15,8 +15,10 @@ Visual Studio Code Server (official Microsoft build), which hosts Visual Studio | Options Id | Description | Type | Default Value | |-----|-----|-----|-----| -| version | Specify the Visual Studio Code Server version to install; or "latest"; or "commit:<hash>" | string | latest | +| version | Specify the Visual Studio Code Server version to install; or "latest"; or "commit:" | string | latest | -## Launch -Run `code-server`, passing `--accept-server-license-terms` if you have reviewed the [Visual Studio Code Server License Terms](https://aka.ms/vscode-server-license) and the [Microsoft Privacy Statement](https://privacy.microsoft.com/en-US/privacystatement). By default, Visual Studio Code Server listens on port 8000, which you may access via, e.g., [port forwarding in desktop Visual Studio Code](https://code.visualstudio.com/docs/devcontainers/containers#_temporarily-forwarding-a-port) or [Tailscale serve](https://tailscale.com/kb/1312/serve). Run `code-server --help` to see all options. + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/vtop/README.md b/src/vtop/README.md index 675fa66e7..13f6ed247 100644 --- a/src/vtop/README.md +++ b/src/vtop/README.md @@ -3,11 +3,11 @@ vtop is a graphical activity monitor for the command line. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/vtop:2": {} + "ghcr.io/devcontainers-extra/features/vtop:2": {} } ``` @@ -18,3 +18,7 @@ vtop is a graphical activity monitor for the command line. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/vue-cli/README.md b/src/vue-cli/README.md index a48d555d4..99c5e4177 100644 --- a/src/vue-cli/README.md +++ b/src/vue-cli/README.md @@ -3,11 +3,11 @@ Vue CLI is a full system for rapid Vue.js development. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/vue-cli:2": {} + "ghcr.io/devcontainers-extra/features/vue-cli:2": {} } ``` @@ -18,3 +18,7 @@ Vue CLI is a full system for rapid Vue.js development. | version | Select the version of Vue CLI to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/vulture/README.md b/src/vulture/README.md index 70c7df02e..867b75cab 100644 --- a/src/vulture/README.md +++ b/src/vulture/README.md @@ -3,11 +3,11 @@ Vulture finds unused code in Python programs. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/vulture:2": {} + "ghcr.io/devcontainers-extra/features/vulture:2": {} } ``` @@ -18,3 +18,7 @@ Vulture finds unused code in Python programs. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/w3m-apt-get/README.md b/src/w3m-apt-get/README.md index b8401c94d..23d93b1e9 100644 --- a/src/w3m-apt-get/README.md +++ b/src/w3m-apt-get/README.md @@ -3,13 +3,22 @@ w3m is a free software/open source text-based web browser and terminal pager. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/w3m-apt-get:1": {} + "ghcr.io/devcontainers-extra/features/w3m-apt-get:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/w3m-homebrew/README.md b/src/w3m-homebrew/README.md index 3322138be..cfbebb45e 100644 --- a/src/w3m-homebrew/README.md +++ b/src/w3m-homebrew/README.md @@ -3,11 +3,11 @@ w3m is a free software/open source text-based web browser and terminal pager. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/w3m-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/w3m-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ w3m is a free software/open source text-based web browser and terminal pager. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/waypoint-asdf/README.md b/src/waypoint-asdf/README.md index 7361895c6..0412a6b20 100644 --- a/src/waypoint-asdf/README.md +++ b/src/waypoint-asdf/README.md @@ -3,11 +3,11 @@ Waypoint is an open source solution that provides a modern workflow for build, deploy, and release across platforms. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/waypoint-asdf:2": {} + "ghcr.io/devcontainers-extra/features/waypoint-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Waypoint is an open source solution that provides a modern workflow for build, d | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/webtau-sdkman/README.md b/src/webtau-sdkman/README.md index 013c7760d..dbcdcd661 100644 --- a/src/webtau-sdkman/README.md +++ b/src/webtau-sdkman/README.md @@ -8,11 +8,11 @@ Consistent set of matchers and testing concepts work across all testing layers. Use REPL mode to speed-up tests development. Leverage rich reporting to significantly cut down failure investigation time. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/webtau-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/webtau-sdkman:2": {} } ``` @@ -25,3 +25,7 @@ significantly cut down failure investigation time. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/wget-apt-get/README.md b/src/wget-apt-get/README.md index d60c05e36..4738b93b2 100644 --- a/src/wget-apt-get/README.md +++ b/src/wget-apt-get/README.md @@ -3,13 +3,22 @@ Wget is a free software package for retrieving files using HTTP, HTTPS, FTP and FTPS, the most widely used Internet protocols. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/wget-apt-get:1": {} + "ghcr.io/devcontainers-extra/features/wget-apt-get:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/wget-homebrew/README.md b/src/wget-homebrew/README.md index e32ea5b6e..bea67555e 100644 --- a/src/wget-homebrew/README.md +++ b/src/wget-homebrew/README.md @@ -3,11 +3,11 @@ Wget is a free software package for retrieving files using HTTP, HTTPS, FTP and FTPS, the most widely used Internet protocols. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/wget-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/wget-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ Wget is a free software package for retrieving files using HTTP, HTTPS, FTP and | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/wireguard-apt-get/README.md b/src/wireguard-apt-get/README.md index e632513cf..aeb1592c3 100644 --- a/src/wireguard-apt-get/README.md +++ b/src/wireguard-apt-get/README.md @@ -3,13 +3,22 @@ WireGuard is an extremely simple yet fast and modern VPN that utilizes state-of-the-art cryptography. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/wireguard-apt-get:1": {} + "ghcr.io/devcontainers-extra/features/wireguard-apt-get:1": {} } ``` +## Options +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| + + + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/src/xmrig/README.md b/src/xmrig/README.md index 78464639e..f8e25dd96 100644 --- a/src/xmrig/README.md +++ b/src/xmrig/README.md @@ -3,11 +3,11 @@ XMRig is a high performance, open source, cross platform RandomX, KawPow, CryptoNight and GhostRider unified CPU/GPU miner and RandomX benchmark. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/xmrig:1": {} + "ghcr.io/devcontainers-extra/features/xmrig:1": {} } ``` @@ -18,3 +18,7 @@ XMRig is a high performance, open source, cross platform RandomX, KawPow, Crypto | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/xonsh/README.md b/src/xonsh/README.md index c0abaa815..55b80c672 100644 --- a/src/xonsh/README.md +++ b/src/xonsh/README.md @@ -3,11 +3,11 @@ Xonsh is a Python-powered, cross-platform, Unix-gazing shell language and command prompt. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/xonsh:1": {} + "ghcr.io/devcontainers-extra/features/xonsh:1": {} } ``` @@ -18,3 +18,7 @@ Xonsh is a Python-powered, cross-platform, Unix-gazing shell language and comman | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/yamllint/README.md b/src/yamllint/README.md index 741004799..d621c792f 100644 --- a/src/yamllint/README.md +++ b/src/yamllint/README.md @@ -3,11 +3,11 @@ yamllint is a Python utility / library to sort imports alphabetically, and automatically separated into sections and by type. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/yamllint:2": {} + "ghcr.io/devcontainers-extra/features/yamllint:2": {} } ``` @@ -18,3 +18,7 @@ yamllint is a Python utility / library to sort imports alphabetically, and autom | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/yapf/README.md b/src/yapf/README.md index e0fcb8f08..842f650a2 100644 --- a/src/yapf/README.md +++ b/src/yapf/README.md @@ -3,11 +3,11 @@ yapf is a formatter for Python files. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/yapf:2": {} + "ghcr.io/devcontainers-extra/features/yapf:2": {} } ``` @@ -18,3 +18,7 @@ yapf is a formatter for Python files. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/youtube-dl/README.md b/src/youtube-dl/README.md index 65d2b77f2..d1eb85292 100644 --- a/src/youtube-dl/README.md +++ b/src/youtube-dl/README.md @@ -3,11 +3,11 @@ youtube-dl is a command-line program to download videos from YouTube.com and other video sites. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/youtube-dl:2": {} + "ghcr.io/devcontainers-extra/features/youtube-dl:2": {} } ``` @@ -18,3 +18,7 @@ youtube-dl is a command-line program to download videos from YouTube.com and oth | version | Select the version of youtube-dl to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/youtubeuploader/README.md b/src/youtubeuploader/README.md index 189dab51e..383f89a1b 100644 --- a/src/youtubeuploader/README.md +++ b/src/youtubeuploader/README.md @@ -3,11 +3,11 @@ Scripted uploads to Youtube. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/youtubeuploader:1": {} + "ghcr.io/devcontainers-extra/features/youtubeuploader:1": {} } ``` @@ -18,3 +18,7 @@ Scripted uploads to Youtube. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/yt-dlp/README.md b/src/yt-dlp/README.md index 9a2918dec..560c6f78f 100644 --- a/src/yt-dlp/README.md +++ b/src/yt-dlp/README.md @@ -3,11 +3,11 @@ yt-dlp is a youtube-dl fork with additional features and fixes. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/yt-dlp:2": {} + "ghcr.io/devcontainers-extra/features/yt-dlp:2": {} } ``` @@ -18,3 +18,7 @@ yt-dlp is a youtube-dl fork with additional features and fixes. | version | Select the version of yt-dlp to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/zig/README.md b/src/zig/README.md index 3757bf247..1f6a4f28d 100644 --- a/src/zig/README.md +++ b/src/zig/README.md @@ -7,7 +7,7 @@ Zig is a general-purpose programming language and toolchain for maintaining robu ```json "features": { - "ghcr.io/devcontainers-contrib/features/zig:1": {} + "ghcr.io/devcontainers-extra/features/zig:1": {} } ``` @@ -21,4 +21,4 @@ Zig is a general-purpose programming language and toolchain for maintaining robu --- -_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers-contrib/features/blob/main/src/zig/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/src/zsh-plugins/README.md b/src/zsh-plugins/README.md index 3b5ba04d2..26366b5d9 100644 --- a/src/zsh-plugins/README.md +++ b/src/zsh-plugins/README.md @@ -7,7 +7,7 @@ Install (Oh-My-)ZSH plugins ```json "features": { - "ghcr.io/devcontainers-contrib/features/zsh-plugins:0": {} + "ghcr.io/devcontainers-extra/features/zsh-plugins:0": {} } ``` @@ -23,4 +23,4 @@ Install (Oh-My-)ZSH plugins --- -_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers-contrib/features/blob/main/src/zsh-plugins/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ From 44435ec5fff43eba186d3bdc1e57198f015d03d8 Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Mon, 30 Sep 2024 17:08:32 +0000 Subject: [PATCH 26/38] chore: regenerate READMEs for archived features --- .../src/actions-runner-noexternals/README.md | 8 ++++-- .../README.md | 8 ++++-- .../src/actions-runner-noruntime/README.md | 8 ++++-- archive/src/age-keygen/README.md | 8 ++++-- archive/src/age/README.md | 8 ++++-- archive/src/airplane-cli/README.md | 8 ++++-- archive/src/bitwarden-cli/README.md | 8 ++++-- archive/src/boundary-asdf/README.md | 8 ++++-- archive/src/btm/README.md | 8 ++++-- archive/src/cert-manager/README.md | 8 ++++-- archive/src/cmctl-asdf/README.md | 8 ++++-- archive/src/codenotary-cas/README.md | 8 ++++-- archive/src/croc/README.md | 8 ++++-- archive/src/cue-asdf/README.md | 8 ++++-- archive/src/edge-impulse-cli/README.md | 8 ++++-- archive/src/elixir-asdf/README.md | 8 ++++-- archive/src/erlang-asdf/README.md | 8 ++++-- archive/src/ffmpeg-homebrew/README.md | 8 ++++-- archive/src/gh-release/README.md | 8 ++++-- archive/src/homebrew-package/README.md | 4 +-- archive/src/ko/README.md | 8 ++++-- archive/src/kubescape/README.md | 8 ++++-- archive/src/mage/README.md | 8 ++++-- archive/src/meson-asdf/README.md | 8 ++++-- archive/src/meteor-cli/README.md | 8 ++++-- archive/src/nushell/README.md | 8 ++++-- archive/src/ory-keto/README.md | 8 ++++-- archive/src/pip-audit/README.md | 8 ++++-- archive/src/porter/README.md | 28 +++++++++++-------- archive/src/postgres-asdf/README.md | 8 ++++-- archive/src/rabbitmq-asdf/README.md | 8 ++++-- archive/src/scala-asdf/README.md | 8 ++++-- archive/src/serverless/README.md | 8 ++++-- archive/src/sqlfmt/README.md | 8 ++++-- archive/src/syncthing/README.md | 8 ++++-- archive/src/trivy/README.md | 8 ++++-- archive/src/vertx-sdkman/README.md | 8 ++++-- archive/src/visualvm-sdkman/README.md | 8 ++++-- archive/src/volta/README.md | 8 ++++-- archive/src/xplr/README.md | 8 ++++-- 40 files changed, 246 insertions(+), 90 deletions(-) diff --git a/archive/src/actions-runner-noexternals/README.md b/archive/src/actions-runner-noexternals/README.md index 249715cc3..265bd1728 100644 --- a/archive/src/actions-runner-noexternals/README.md +++ b/archive/src/actions-runner-noexternals/README.md @@ -3,11 +3,11 @@ The runner is the application that runs a job from a GitHub Actions workflow. It is used by GitHub Actions in the hosted virtual environments, or you can self-host the runner in your own environment. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/actions-runner-noexternals:1": {} + "ghcr.io/devcontainers-extra/features/actions-runner-noexternals:1": {} } ``` @@ -19,3 +19,7 @@ The runner is the application that runs a job from a GitHub Actions workflow. It | dotnetVersion | Select the dotnet version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/actions-runner-noruntime-noexternals/README.md b/archive/src/actions-runner-noruntime-noexternals/README.md index 3a0459488..2d5aceb68 100644 --- a/archive/src/actions-runner-noruntime-noexternals/README.md +++ b/archive/src/actions-runner-noruntime-noexternals/README.md @@ -3,11 +3,11 @@ The runner is the application that runs a job from a GitHub Actions workflow. 
It is used by GitHub Actions in the hosted virtual environments, or you can self-host the runner in your own environment. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/actions-runner-noruntime-noexternals:1": {} + "ghcr.io/devcontainers-extra/features/actions-runner-noruntime-noexternals:1": {} } ``` @@ -19,3 +19,7 @@ The runner is the application that runs a job from a GitHub Actions workflow. It | dotnetVersion | Select the dotnet version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/actions-runner-noruntime/README.md b/archive/src/actions-runner-noruntime/README.md index 15112fd05..d6585f55e 100644 --- a/archive/src/actions-runner-noruntime/README.md +++ b/archive/src/actions-runner-noruntime/README.md @@ -3,11 +3,11 @@ The runner is the application that runs a job from a GitHub Actions workflow. It is used by GitHub Actions in the hosted virtual environments, or you can self-host the runner in your own environment. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/actions-runner-noruntime:1": {} + "ghcr.io/devcontainers-extra/features/actions-runner-noruntime:1": {} } ``` @@ -19,3 +19,7 @@ The runner is the application that runs a job from a GitHub Actions workflow. It | dotnetVersion | Select the dotnet version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/age-keygen/README.md b/archive/src/age-keygen/README.md index 27e6c958a..58afdf334 100644 --- a/archive/src/age-keygen/README.md +++ b/archive/src/age-keygen/README.md @@ -3,11 +3,11 @@ age-keygen generate a key pair for use with age. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/age-keygen:1": {} + "ghcr.io/devcontainers-extra/features/age-keygen:1": {} } ``` @@ -18,3 +18,7 @@ age-keygen generate a key pair for use with age. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/age/README.md b/archive/src/age/README.md index 1a6efa55a..6f26c1d15 100644 --- a/archive/src/age/README.md +++ b/archive/src/age/README.md @@ -3,11 +3,11 @@ age is a simple, modern and secure encryption tool (and Go library) with small explicit keys, no config options, and UNIX-style composability. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/age:1": {} + "ghcr.io/devcontainers-extra/features/age:1": {} } ``` @@ -18,3 +18,7 @@ age is a simple, modern and secure encryption tool (and Go library) with small e | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/archive/src/airplane-cli/README.md b/archive/src/airplane-cli/README.md index 5fe4e846a..24cb1d9eb 100644 --- a/archive/src/airplane-cli/README.md +++ b/archive/src/airplane-cli/README.md @@ -3,11 +3,11 @@ The Airplane CLI, airplane, is the primary way you'll create and deploy new tasks and runbooks. The CLI also provides utility methods for managing API keys, getting/setting configs, and executing tasks/runbooks. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/airplane-cli:1": {} + "ghcr.io/devcontainers-extra/features/airplane-cli:1": {} } ``` @@ -18,3 +18,7 @@ The Airplane CLI, airplane, is the primary way you'll create and deploy new task | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/bitwarden-cli/README.md b/archive/src/bitwarden-cli/README.md index 6fb0d770f..a7ea34739 100644 --- a/archive/src/bitwarden-cli/README.md +++ b/archive/src/bitwarden-cli/README.md @@ -3,11 +3,11 @@ The Bitwarden command-line interface (CLI) is a powerful, fully-featured tool for accessing and managing your vault. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/bitwarden-cli:1": {} + "ghcr.io/devcontainers-extra/features/bitwarden-cli:1": {} } ``` @@ -18,3 +18,7 @@ The Bitwarden command-line interface (CLI) is a powerful, fully-featured tool fo | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/boundary-asdf/README.md b/archive/src/boundary-asdf/README.md index 275d48d40..61ab1e66a 100644 --- a/archive/src/boundary-asdf/README.md +++ b/archive/src/boundary-asdf/README.md @@ -3,11 +3,11 @@ Installs boundary -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/boundary-asdf:2": {} + "ghcr.io/devcontainers-extra/features/boundary-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Installs boundary | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/btm/README.md b/archive/src/btm/README.md index f7bfec9a6..a60d182ab 100644 --- a/archive/src/btm/README.md +++ b/archive/src/btm/README.md @@ -3,11 +3,11 @@ bottom is a customizable cross-platform graphical process/system monitor for the terminal. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/btm:1": {} + "ghcr.io/devcontainers-extra/features/btm:1": {} } ``` @@ -18,3 +18,7 @@ bottom is a customizable cross-platform graphical process/system monitor for the | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/archive/src/cert-manager/README.md b/archive/src/cert-manager/README.md index 3e226657b..8ff290a12 100644 --- a/archive/src/cert-manager/README.md +++ b/archive/src/cert-manager/README.md @@ -3,11 +3,11 @@ cert-manager adds certificates and certificate issuers as resource types in Kubernetes clusters, and simplifies the process of obtaining, renewing and using those certificates. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/cert-manager:1": {} + "ghcr.io/devcontainers-extra/features/cert-manager:1": {} } ``` @@ -18,3 +18,7 @@ cert-manager adds certificates and certificate issuers as resource types in Kube | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/cmctl-asdf/README.md b/archive/src/cmctl-asdf/README.md index 67c2b583e..6076f8727 100644 --- a/archive/src/cmctl-asdf/README.md +++ b/archive/src/cmctl-asdf/README.md @@ -3,11 +3,11 @@ cmctl is a command line tool that can help you manage cert-manager and its resources inside your cluster. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/cmctl-asdf:2": {} + "ghcr.io/devcontainers-extra/features/cmctl-asdf:2": {} } ``` @@ -18,3 +18,7 @@ cmctl is a command line tool that can help you manage cert-manager and its resou | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/codenotary-cas/README.md b/archive/src/codenotary-cas/README.md index e1f65259b..e802181fb 100644 --- a/archive/src/codenotary-cas/README.md +++ b/archive/src/codenotary-cas/README.md @@ -3,11 +3,11 @@ Community Attestation Service (CAS) can give any digital asset a meaningful, globally-unique, immutable identity that is authentic, verifiable, traceable from anywhere. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/codenotary-cas:1": {} + "ghcr.io/devcontainers-extra/features/codenotary-cas:1": {} } ``` @@ -18,3 +18,7 @@ Community Attestation Service (CAS) can give any digital asset a meaningful, glo | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/croc/README.md b/archive/src/croc/README.md index 35f75b250..a384d29fc 100644 --- a/archive/src/croc/README.md +++ b/archive/src/croc/README.md @@ -3,11 +3,11 @@ croc is a tool that allows any two computers to simply and securely transfer files and folders. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/croc:1": {} + "ghcr.io/devcontainers-extra/features/croc:1": {} } ``` @@ -18,3 +18,7 @@ croc is a tool that allows any two computers to simply and securely transfer fil | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/archive/src/cue-asdf/README.md b/archive/src/cue-asdf/README.md index f6c18509e..9eb03700e 100644 --- a/archive/src/cue-asdf/README.md +++ b/archive/src/cue-asdf/README.md @@ -3,11 +3,11 @@ CUE is an open source data constraint language which aims to simplify tasks involving defining and using data. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/cue-asdf:2": {} + "ghcr.io/devcontainers-extra/features/cue-asdf:2": {} } ``` @@ -18,3 +18,7 @@ CUE is an open source data constraint language which aims to simplify tasks invo | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/edge-impulse-cli/README.md b/archive/src/edge-impulse-cli/README.md index 40c56648f..1d6a84832 100644 --- a/archive/src/edge-impulse-cli/README.md +++ b/archive/src/edge-impulse-cli/README.md @@ -3,11 +3,11 @@ Edge Impulse CLI is used to control local devices, act as a proxy to synchronise data for devices that don't have an internet connection, and to upload and convert local files. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/edge-impulse-cli:1": {} + "ghcr.io/devcontainers-extra/features/edge-impulse-cli:1": {} } ``` @@ -18,3 +18,7 @@ Edge Impulse CLI is used to control local devices, act as a proxy to synchronise | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/elixir-asdf/README.md b/archive/src/elixir-asdf/README.md index 57e159dfb..ce1f97ec9 100644 --- a/archive/src/elixir-asdf/README.md +++ b/archive/src/elixir-asdf/README.md @@ -3,11 +3,11 @@ Elixir is a functional, concurrent, general-purpose programming language that runs on the BEAM virtual machine which is also used to implement the Erlang programming language. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/elixir-asdf:2": {} + "ghcr.io/devcontainers-extra/features/elixir-asdf:2": {} } ``` @@ -19,3 +19,7 @@ Elixir is a functional, concurrent, general-purpose programming language that ru | erlangVersion | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/erlang-asdf/README.md b/archive/src/erlang-asdf/README.md index 6850cdafc..0b5b2d9e2 100644 --- a/archive/src/erlang-asdf/README.md +++ b/archive/src/erlang-asdf/README.md @@ -3,11 +3,11 @@ Erlang is a general-purpose, concurrent, functional programming language, and a garbage-collected runtime system. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/erlang-asdf:2": {} + "ghcr.io/devcontainers-extra/features/erlang-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Erlang is a general-purpose, concurrent, functional programming language, and a | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/archive/src/ffmpeg-homebrew/README.md b/archive/src/ffmpeg-homebrew/README.md index fd36a8458..4d692dc38 100644 --- a/archive/src/ffmpeg-homebrew/README.md +++ b/archive/src/ffmpeg-homebrew/README.md @@ -3,11 +3,11 @@ FFmpeg is the leading multimedia framework, able to decode, encode, transcode, mux, demux, stream, filter and play pretty much anything that humans and machines have created. It supports the most obscure ancient formats up to the cutting edge. No matter if they were designed by some standards committee, the community or a corporation. It is also highly portable: FFmpeg compiles, runs, and passes our testing infrastructure FATE across Linux, Mac OS X, Microsoft Windows, the BSDs, Solaris, etc. under a wide variety of build environments, machine architectures, and configurations. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ffmpeg-homebrew:1": {} + "ghcr.io/devcontainers-extra/features/ffmpeg-homebrew:1": {} } ``` @@ -18,3 +18,7 @@ FFmpeg is the leading multimedia framework, able to decode, encode, transcode, m | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/gh-release/README.md b/archive/src/gh-release/README.md index 16e427018..bd131e64a 100644 --- a/archive/src/gh-release/README.md +++ b/archive/src/gh-release/README.md @@ -3,11 +3,11 @@ Github Release feature will download, extract and add to path a release binary from a given repo -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/gh-release:1": {} + "ghcr.io/devcontainers-extra/features/gh-release:1": {} } ``` @@ -26,3 +26,7 @@ Github Release feature will download, extract and add to path a release binary f | additionalFlags | (Optional) Any additional flags for the 'nanolayer install gh-release' command | string | - | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/homebrew-package/README.md b/archive/src/homebrew-package/README.md index 05641345b..c3e323165 100644 --- a/archive/src/homebrew-package/README.md +++ b/archive/src/homebrew-package/README.md @@ -7,7 +7,7 @@ Installs a Homebrew package. ```json "features": { - "ghcr.io/devcontainers-contrib/features/homebrew-package:1": {} + "ghcr.io/devcontainers-extra/features/homebrew-package:1": {} } ``` @@ -23,4 +23,4 @@ Installs a Homebrew package. --- -_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers-contrib/features/blob/main/src/homebrew-package/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/ko/README.md b/archive/src/ko/README.md index e6fbb4d15..3c73cc540 100644 --- a/archive/src/ko/README.md +++ b/archive/src/ko/README.md @@ -3,11 +3,11 @@ ko is a simple, fast container image builder for Go applications. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ko:1": {} + "ghcr.io/devcontainers-extra/features/ko:1": {} } ``` @@ -18,3 +18,7 @@ ko is a simple, fast container image builder for Go applications. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/kubescape/README.md b/archive/src/kubescape/README.md index 2fef6e649..8ea76c01f 100644 --- a/archive/src/kubescape/README.md +++ b/archive/src/kubescape/README.md @@ -3,11 +3,11 @@ Kubescape is an open-source Kubernetes security platform. It includes risk analysis, security compliance, and misconfiguration scanning. Targeted at the DevSecOps practitioner or platform engineer, it offers an easy-to-use CLI interface, flexible output formats, and automated scanning capabilities. It saves Kubernetes users and admins precious time, effort, and resources. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/kubescape:1": {} + "ghcr.io/devcontainers-extra/features/kubescape:1": {} } ``` @@ -18,3 +18,7 @@ Kubescape is an open-source Kubernetes security platform. It includes risk analy | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/mage/README.md b/archive/src/mage/README.md index 7c8f59baf..8710751ea 100644 --- a/archive/src/mage/README.md +++ b/archive/src/mage/README.md @@ -3,11 +3,11 @@ Mage is a make-like build tool using Go. You write plain-old go functions, and Mage automatically uses them as Makefile-like runnable targets. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/mage:1": {} + "ghcr.io/devcontainers-extra/features/mage:1": {} } ``` @@ -18,3 +18,7 @@ Mage is a make-like build tool using Go. You write plain-old go functions, and M | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/meson-asdf/README.md b/archive/src/meson-asdf/README.md index 622d67f79..778b7e1e0 100644 --- a/archive/src/meson-asdf/README.md +++ b/archive/src/meson-asdf/README.md @@ -3,11 +3,11 @@ Meson is an open source build system meant to be both extremely fast, and, even more importantly, as user friendly as possible. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/meson-asdf:2": {} + "ghcr.io/devcontainers-extra/features/meson-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Meson is an open source build system meant to be both extremely fast, and, even | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/meteor-cli/README.md b/archive/src/meteor-cli/README.md index 9263236f9..90dca00a9 100644 --- a/archive/src/meteor-cli/README.md +++ b/archive/src/meteor-cli/README.md @@ -3,11 +3,11 @@ Meteor is a full-stack JavaScript platform for developing modern web and mobile applications. 
Meteor includes a key set of technologies for building connected-client reactive applications, a build tool, and a curated set of packages from the Node.js and general JavaScript community. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/meteor-cli:1": {} + "ghcr.io/devcontainers-extra/features/meteor-cli:1": {} } ``` @@ -18,3 +18,7 @@ Meteor is a full-stack JavaScript platform for developing modern web and mobile | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/nushell/README.md b/archive/src/nushell/README.md index 4d8e79639..57000d626 100644 --- a/archive/src/nushell/README.md +++ b/archive/src/nushell/README.md @@ -3,11 +3,11 @@ Nushell is a shell with a focus on operating on structured data in the command line. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/nushell:1": {} + "ghcr.io/devcontainers-extra/features/nushell:1": {} } ``` @@ -18,3 +18,7 @@ Nushell is a shell with a focus on operating on structured data in the command l | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/ory-keto/README.md b/archive/src/ory-keto/README.md index d4c7e3a4e..bcd4b0849 100644 --- a/archive/src/ory-keto/README.md +++ b/archive/src/ory-keto/README.md @@ -3,11 +3,11 @@ Ory Keto is an implementation of 'Zanzibar: Google's Consistent, Global Authorization System'. Ships gRPC, REST APIs, newSQL, and an easy and granular permission language. Supports ACL, RBAC, and other access models. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/ory-keto:1": {} + "ghcr.io/devcontainers-extra/features/ory-keto:1": {} } ``` @@ -18,3 +18,7 @@ Ory Keto is an implementation of 'Zanzibar: Google's Consistent, Global Authoriz | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/pip-audit/README.md b/archive/src/pip-audit/README.md index 6fac8fbe0..1c7e0d120 100644 --- a/archive/src/pip-audit/README.md +++ b/archive/src/pip-audit/README.md @@ -3,11 +3,11 @@ pip-audit is a tool for scanning Python environments for packages with known vulnerabilities. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/pip-audit:1": {} + "ghcr.io/devcontainers-extra/features/pip-audit:1": {} } ``` @@ -18,3 +18,7 @@ pip-audit is a tool for scanning Python environments for packages with known vul | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/archive/src/porter/README.md b/archive/src/porter/README.md index 99b4bbcff..79f690622 100644 --- a/archive/src/porter/README.md +++ b/archive/src/porter/README.md @@ -3,11 +3,11 @@ Porter enables you to package your application artifact, client tools, configuration and deployment logic together as an installer that you can distribute, and install with a single command. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/porter:1": {} + "ghcr.io/devcontainers-extra/features/porter:1": {} } ``` @@ -16,15 +16,19 @@ Porter enables you to package your application artifact, client tools, configura | Options Id | Description | Type | Default Value | |-----|-----|-----|-----| | version | Select the version to install. Empty string installs the latest version. | string | latest | -| terraformMixinVersion | [Optional] Select the terraform mixin version to install. | string | | -| azMixinVersion | [Optional] Select the az mixin version to install. | string | | -| awsMixinVersion | [Optional] Select the aws mixin version to install. | string | | -| dockerMixinVersion | [Optional] Select the docker mixin version to install. | string | | -| dockerComposeMixinVersion | [Optional] Select the docker-compose mixin version to install. | string | | -| gcloudMixinVersion | [Optional] Select the gcloud mixin version to install. | string | | -| helmMixinVersion | [Optional] Select the helm mixin version to install. | string | | -| armMixinVersion | [Optional] Select the arm mixin version to install. | string | | -| azurePluginVersion | [Optional] Select the azure plugin version to install. | string | | -| kubernetesPluginVersion | [Optional] Select the kubernetes plugin version to install. | string | | +| terraformMixinVersion | Select the terraform mixin version to install. | string | - | +| azMixinVersion | Select the az mixin version to install. | string | - | +| awsMixinVersion | Select the aws mixin version to install. | string | - | +| dockerMixinVersion | Select the docker mixin version to install. | string | - | +| dockerComposeMixinVersion | Select the docker-compose mixin version to install. | string | - | +| gcloudMixinVersion | Select the gcloud mixin version to install. | string | - | +| helmMixinVersion | Select the helm mixin version to install. | string | - | +| armMixinVersion | Select the arm mixin version to install. | string | - | +| azurePluginVersion | Select the azure plugin version to install. | string | - | +| kubernetesPluginVersion | Select the kubernetes plugin version to install. | string | - | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/postgres-asdf/README.md b/archive/src/postgres-asdf/README.md index 3648ee7db..bbc6f39e3 100644 --- a/archive/src/postgres-asdf/README.md +++ b/archive/src/postgres-asdf/README.md @@ -3,11 +3,11 @@ PostgreSQL is a powerful, open source object-relational database system with over 35 years of active development that has earned it a strong reputation for reliability, feature robustness, and performance. 
-## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/postgres-asdf:1": {} + "ghcr.io/devcontainers-extra/features/postgres-asdf:1": {} } ``` @@ -18,3 +18,7 @@ PostgreSQL is a powerful, open source object-relational database system with ove | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/rabbitmq-asdf/README.md b/archive/src/rabbitmq-asdf/README.md index b80df508d..ce1759f08 100644 --- a/archive/src/rabbitmq-asdf/README.md +++ b/archive/src/rabbitmq-asdf/README.md @@ -3,11 +3,11 @@ RabbitMQ is a feature rich, multi-protocol messaging and streaming broker. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/rabbitmq-asdf:1": {} + "ghcr.io/devcontainers-extra/features/rabbitmq-asdf:1": {} } ``` @@ -19,3 +19,7 @@ RabbitMQ is a feature rich, multi-protocol messaging and streaming broker. | erlangVersion | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/scala-asdf/README.md b/archive/src/scala-asdf/README.md index fb2db54d8..6dddf7fc7 100644 --- a/archive/src/scala-asdf/README.md +++ b/archive/src/scala-asdf/README.md @@ -3,11 +3,11 @@ Scala combines object-oriented and functional programming in one concise, high-level language -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/scala-asdf:2": {} + "ghcr.io/devcontainers-extra/features/scala-asdf:2": {} } ``` @@ -18,3 +18,7 @@ Scala combines object-oriented and functional programming in one concise, high-l | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/serverless/README.md b/archive/src/serverless/README.md index 78ad77a09..f68609e83 100644 --- a/archive/src/serverless/README.md +++ b/archive/src/serverless/README.md @@ -3,11 +3,11 @@ Serverless Framework is a widely used open-source IaC tool that allows you to build, and deploy serverless applications on multiple cloud providers. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/serverless:2": {} + "ghcr.io/devcontainers-extra/features/serverless:2": {} } ``` @@ -18,3 +18,7 @@ Serverless Framework is a widely used open-source IaC tool that allows you to bu | version | Select the version of Serverless Framework to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/sqlfmt/README.md b/archive/src/sqlfmt/README.md index 278abeddd..ec737ab18 100644 --- a/archive/src/sqlfmt/README.md +++ b/archive/src/sqlfmt/README.md @@ -3,11 +3,11 @@ sqlfmt formats your dbt SQL files so you don't have to. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/sqlfmt:1": {} + "ghcr.io/devcontainers-extra/features/sqlfmt:1": {} } ``` @@ -18,3 +18,7 @@ sqlfmt formats your dbt SQL files so you don't have to. 
| version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/syncthing/README.md b/archive/src/syncthing/README.md index f10131541..55ec85158 100644 --- a/archive/src/syncthing/README.md +++ b/archive/src/syncthing/README.md @@ -3,11 +3,11 @@ Syncthing is a continuous file synchronization program. It synchronizes files between two or more computers. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/syncthing:1": {} + "ghcr.io/devcontainers-extra/features/syncthing:1": {} } ``` @@ -18,3 +18,7 @@ Syncthing is a continuous file synchronization program. It synchronizes files be | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/trivy/README.md b/archive/src/trivy/README.md index 94e696d16..7b9aff0d4 100644 --- a/archive/src/trivy/README.md +++ b/archive/src/trivy/README.md @@ -3,11 +3,11 @@ Trivy is a comprehensive and versatile security scanner. It finds vulnerabilities, misconfigurations, secrets, SBOM in containers, Kubernetes, code repositories, clouds and more. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/trivy:1": {} + "ghcr.io/devcontainers-extra/features/trivy:1": {} } ``` @@ -18,3 +18,7 @@ Trivy is a comprehensive and versatile security scanner. It finds vulnerabilitie | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/vertx-sdkman/README.md b/archive/src/vertx-sdkman/README.md index 371efd5b3..d78798f9f 100644 --- a/archive/src/vertx-sdkman/README.md +++ b/archive/src/vertx-sdkman/README.md @@ -3,11 +3,11 @@ Vert.x is a tool-kit for building reactive applications on the JVM. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/vertx-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/vertx-sdkman:2": {} } ``` @@ -20,3 +20,7 @@ Vert.x is a tool-kit for building reactive applications on the JVM. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/visualvm-sdkman/README.md b/archive/src/visualvm-sdkman/README.md index 248280108..ee16508a5 100644 --- a/archive/src/visualvm-sdkman/README.md +++ b/archive/src/visualvm-sdkman/README.md @@ -5,11 +5,11 @@ VisualVM is a tool that provides a visual interface for viewing detailed information about Java applications while they are running on a Java Virtual Machine. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/visualvm-sdkman:2": {} + "ghcr.io/devcontainers-extra/features/visualvm-sdkman:2": {} } ``` @@ -22,3 +22,7 @@ Machine. | jdkDistro | Select or enter a JDK distribution to install | string | ms | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). 
Add additional notes to a `NOTES.md`._ diff --git a/archive/src/volta/README.md b/archive/src/volta/README.md index 00353b3f0..3ad2af637 100644 --- a/archive/src/volta/README.md +++ b/archive/src/volta/README.md @@ -3,11 +3,11 @@ Volta is a hassle-free way to manage your JavaScript command-line tools. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/volta:1": {} + "ghcr.io/devcontainers-extra/features/volta:1": {} } ``` @@ -18,3 +18,7 @@ Volta is a hassle-free way to manage your JavaScript command-line tools. | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/archive/src/xplr/README.md b/archive/src/xplr/README.md index f5b487a49..66f210033 100644 --- a/archive/src/xplr/README.md +++ b/archive/src/xplr/README.md @@ -3,11 +3,11 @@ xplr is a terminal UI based file explorer that aims to increase our terminal productivity by being a flexible, interactive orchestrator for the ever growing awesome command-line utilities that work with the file-system. -## Example DevContainer Usage +## Example Usage ```json "features": { - "ghcr.io/devcontainers-contrib/features/xplr:1": {} + "ghcr.io/devcontainers-extra/features/xplr:1": {} } ``` @@ -18,3 +18,7 @@ xplr is a terminal UI based file explorer that aims to increase our terminal pro | version | Select the version to install. | string | latest | + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](devcontainer-feature.json). Add additional notes to a `NOTES.md`._ From 6d19d484f997ae5caba6022043dc9fdfa01a933b Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Mon, 30 Sep 2024 17:30:10 +0000 Subject: [PATCH 27/38] chore: update issue templates --- .github/ISSUE_TEMPLATE/feature-bug-report.yaml | 6 +++--- .github/ISSUE_TEMPLATE/suggest-feature.yaml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/feature-bug-report.yaml b/.github/ISSUE_TEMPLATE/feature-bug-report.yaml index 0addf0924..2e8c9dcae 100644 --- a/.github/ISSUE_TEMPLATE/feature-bug-report.yaml +++ b/.github/ISSUE_TEMPLATE/feature-bug-report.yaml @@ -1,4 +1,4 @@ -name: 🐞Bug Report🐞 +name: 🐞 Bug Report 🐞 description: File a bug report title: "[Bug]: " labels: ["bug", "triage"] @@ -11,13 +11,13 @@ body: id: version attributes: label: Feature id and version - description: "For example: ghcr.io/devcontainers-contrib/features/deno:1.0.2" + description: "For example: ghcr.io/devcontainers-extra/features/deno:1.0.2" validations: required: false - type: input id: image attributes: - label: Devcontainer base Image + label: Dev Container base Image description: "For example: mcr.microsoft.com/devcontainers/base:ubuntu" validations: required: false diff --git a/.github/ISSUE_TEMPLATE/suggest-feature.yaml b/.github/ISSUE_TEMPLATE/suggest-feature.yaml index e6fe72fa5..bdca8ef99 100644 --- a/.github/ISSUE_TEMPLATE/suggest-feature.yaml +++ b/.github/ISSUE_TEMPLATE/suggest-feature.yaml @@ -1,4 +1,4 @@ -name: 🔥Suggest a new Feature!🔥 +name: 🔥 Suggest a new Feature! 🔥 description: Missing a feature? Ask and you shall receive! 
title: "[Feature Request]: " labels: ["enhancement"] From 118b67c91b1e0514a56536c538a8a8a3cf4a9edf Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Mon, 30 Sep 2024 17:57:50 +0000 Subject: [PATCH 28/38] chore: update README --- README.md | 30 +++++++++--------------------- 1 file changed, 9 insertions(+), 21 deletions(-) diff --git a/README.md b/README.md index 275c9530c..30c181539 100644 --- a/README.md +++ b/README.md @@ -1,23 +1,15 @@ -# Devcontainer Features - -![Codespaces](https://img.shields.io/static/v1?style=for-the-badge&message=Codespaces&color=181717&logo=GitHub&logoColor=FFFFFF&label=) -![Devcontainers](https://img.shields.io/static/v1?style=for-the-badge&message=Devcontainers&color=2496ED&logo=Docker&logoColor=FFFFFF&label=) -![Python](https://img.shields.io/static/v1?style=for-the-badge&message=Python&color=3776AB&logo=Python&logoColor=FFFFFF&label=) +# Extra Dev Container Features 🐳 Extra add-in features for [devcontainers](https://code.visualstudio.com/docs/devcontainers/containers) and [GitHub Codespaces](https://github.com/features/codespaces) -
- -![](https://i.imgur.com/VgiY81S.png) +👀 Don't see your feature here? [🔥 Suggest a new feature! 🔥](https://github.com/devcontainers-extra/features/issues/new?template=suggest-feature.yaml) \ +🔍 Found a bug? [🐞 Open a bug report! 🐞](https://github.com/devcontainers-extra/features/issues/new?template=feature-bug-report.yaml) -
- -💻 Works with devcontainers \ -☁️ Works with GitHub Codespaces \ -👀 Don't see your feature here? [🔥Suggest a new feature!🔥](https://github.com/devcontainers-contrib/features/issues/new?template=suggest-feature.yaml) +## About This Fork +This repository is a continuation of the [devcontainers-contrib/features](https://github.com/devcontainers-contrib/features) project, initially developed by [danielbraun89](https://github.com/danielbraun89). The fork was created to maintain and enhance the project due to inactivity in the original repository. ## Usage @@ -29,16 +21,16 @@ of an `options` object. ```json { - "image": "mcr.microsoft.com/devcontainers/universal", + "image": "mcr.microsoft.com/devcontainers/base:bookworm", "features": { - "ghcr.io/devcontainers-contrib/features/deno": {}, - "ghcr.io/devcontainers-contrib/features/neovim": {} + "ghcr.io/devcontainers-extra/features/deno": {}, + "ghcr.io/devcontainers-extra/features/neovim": {} } } ``` Then, after adding your devcontainer config file, you can open it in GitHub -Codespaces, or [open it locally using VS Code]. Be warned some features will +Codespaces, or \[open it locally using VS Code\]. Be warned some features will compile things from source code and may take a while!
@@ -46,7 +38,3 @@ compile things from source code and may take a while! ![](https://i.imgur.com/JMdowst.png)
- -### Found a bug? - -[🐞Open an bug report!🐞](https://github.com/devcontainers-contrib/features/issues/new?template=feature-bug-report.yaml) From 3e2c060b80935c7eba35ecea3d2346c36e65387a Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Mon, 30 Sep 2024 17:58:33 +0000 Subject: [PATCH 29/38] chore: update LICENSE --- LICENSE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LICENSE b/LICENSE index 3fb8ff2e8..9abfedbcb 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2022 devcontainers-contrib +Copyright (c) 2024 devcontainers-extra Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal From 05d2886b06883c1d5158cc4df360061300b5714e Mon Sep 17 00:00:00 2001 From: Dan Mills Date: Tue, 8 Oct 2024 15:22:45 -0700 Subject: [PATCH 30/38] Note that this is not officially related to the Devcontainers project Signed-off-by: Dan Mills --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index 30c181539..8475fc410 100644 --- a/README.md +++ b/README.md @@ -11,6 +11,9 @@ This repository is a continuation of the [devcontainers-contrib/features](https://github.com/devcontainers-contrib/features) project, initially developed by [danielbraun89](https://github.com/danielbraun89). The fork was created to maintain and enhance the project due to inactivity in the original repository. +> [!NOTE] +> This project is not officially associated with or endoresed by the official [Devcontainers](https://containers.dev/) project + ## Usage Just add a `.devcontainer/devcontainer.json` file with a `features` key. It's From 41a688cfcc62e4d6b998aa51d478958f78acf326 Mon Sep 17 00:00:00 2001 From: Dan Mills Date: Wed, 9 Oct 2024 09:54:01 -0700 Subject: [PATCH 31/38] Fix typo Co-authored-by: Arek Kalandyk <36413794+koralowiec@users.noreply.github.com> Signed-off-by: Dan Mills --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8475fc410..862e611bf 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ This repository is a continuation of the [devcontainers-contrib/features](https://github.com/devcontainers-contrib/features) project, initially developed by [danielbraun89](https://github.com/danielbraun89). The fork was created to maintain and enhance the project due to inactivity in the original repository. 
> [!NOTE] -> This project is not officially associated with or endoresed by the official [Devcontainers](https://containers.dev/) project +> This project is not officially associated with or endorsed by the official [Devcontainers](https://containers.dev/) project ## Usage From 63cc19e95312b05b27dffa45b9e3fe9e6eb39a0e Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Wed, 9 Oct 2024 17:20:03 +0000 Subject: [PATCH 32/38] chore: add unarchive command in justfile --- justfile | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/justfile b/justfile index d195171f5..22fe23345 100644 --- a/justfile +++ b/justfile @@ -5,3 +5,7 @@ test feature-name: test-scenario feature-name scenario-filter: devcontainer features test -f {{feature-name}} --filter "{{scenario-filter}}" --skip-autogenerated + +unarchive feature-name: + mv archive/src/{{feature-name}} src/ + mv archive/test/{{feature-name}} test/ \ No newline at end of file From f7453e224e9b0803b8c911e2bc45ea8f3861186a Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Wed, 9 Oct 2024 17:21:31 +0000 Subject: [PATCH 33/38] chore: unarchive gh-release --- {archive/src => src}/gh-release/README.md | 0 {archive/src => src}/gh-release/devcontainer-feature.json | 0 {archive/src => src}/gh-release/install.sh | 0 {archive/src => src}/gh-release/library_scripts.sh | 0 {archive/test => test}/gh-release/scenarios.json | 0 {archive/test => test}/gh-release/test_act.sh | 0 {archive/test => test}/gh-release/test_apiops.sh | 0 {archive/test => test}/gh-release/test_defaults_alpine.sh | 0 {archive/test => test}/gh-release/test_defaults_debian.sh | 0 {archive/test => test}/gh-release/test_etcd.sh | 0 {archive/test => test}/gh-release/test_no_git.sh | 0 {archive/test => test}/gh-release/test_powershell.sh | 0 12 files changed, 0 insertions(+), 0 deletions(-) rename {archive/src => src}/gh-release/README.md (100%) rename {archive/src => src}/gh-release/devcontainer-feature.json (100%) rename {archive/src => src}/gh-release/install.sh (100%) rename {archive/src => src}/gh-release/library_scripts.sh (100%) rename {archive/test => test}/gh-release/scenarios.json (100%) rename {archive/test => test}/gh-release/test_act.sh (100%) rename {archive/test => test}/gh-release/test_apiops.sh (100%) rename {archive/test => test}/gh-release/test_defaults_alpine.sh (100%) rename {archive/test => test}/gh-release/test_defaults_debian.sh (100%) rename {archive/test => test}/gh-release/test_etcd.sh (100%) rename {archive/test => test}/gh-release/test_no_git.sh (100%) rename {archive/test => test}/gh-release/test_powershell.sh (100%) diff --git a/archive/src/gh-release/README.md b/src/gh-release/README.md similarity index 100% rename from archive/src/gh-release/README.md rename to src/gh-release/README.md diff --git a/archive/src/gh-release/devcontainer-feature.json b/src/gh-release/devcontainer-feature.json similarity index 100% rename from archive/src/gh-release/devcontainer-feature.json rename to src/gh-release/devcontainer-feature.json diff --git a/archive/src/gh-release/install.sh b/src/gh-release/install.sh similarity index 100% rename from archive/src/gh-release/install.sh rename to src/gh-release/install.sh diff --git a/archive/src/gh-release/library_scripts.sh b/src/gh-release/library_scripts.sh similarity index 100% rename from archive/src/gh-release/library_scripts.sh rename to src/gh-release/library_scripts.sh diff --git a/archive/test/gh-release/scenarios.json 
b/test/gh-release/scenarios.json similarity index 100% rename from archive/test/gh-release/scenarios.json rename to test/gh-release/scenarios.json diff --git a/archive/test/gh-release/test_act.sh b/test/gh-release/test_act.sh similarity index 100% rename from archive/test/gh-release/test_act.sh rename to test/gh-release/test_act.sh diff --git a/archive/test/gh-release/test_apiops.sh b/test/gh-release/test_apiops.sh similarity index 100% rename from archive/test/gh-release/test_apiops.sh rename to test/gh-release/test_apiops.sh diff --git a/archive/test/gh-release/test_defaults_alpine.sh b/test/gh-release/test_defaults_alpine.sh similarity index 100% rename from archive/test/gh-release/test_defaults_alpine.sh rename to test/gh-release/test_defaults_alpine.sh diff --git a/archive/test/gh-release/test_defaults_debian.sh b/test/gh-release/test_defaults_debian.sh similarity index 100% rename from archive/test/gh-release/test_defaults_debian.sh rename to test/gh-release/test_defaults_debian.sh diff --git a/archive/test/gh-release/test_etcd.sh b/test/gh-release/test_etcd.sh similarity index 100% rename from archive/test/gh-release/test_etcd.sh rename to test/gh-release/test_etcd.sh diff --git a/archive/test/gh-release/test_no_git.sh b/test/gh-release/test_no_git.sh similarity index 100% rename from archive/test/gh-release/test_no_git.sh rename to test/gh-release/test_no_git.sh diff --git a/archive/test/gh-release/test_powershell.sh b/test/gh-release/test_powershell.sh similarity index 100% rename from archive/test/gh-release/test_powershell.sh rename to test/gh-release/test_powershell.sh From c6ce7860f7b84c1988fe3ff05595b148c77f1513 Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Wed, 9 Oct 2024 17:23:11 +0000 Subject: [PATCH 34/38] fix(gh-release): update regex for a test scenario --- test/gh-release/scenarios.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/gh-release/scenarios.json b/test/gh-release/scenarios.json index 461967d2b..1b1aee84d 100644 --- a/test/gh-release/scenarios.json +++ b/test/gh-release/scenarios.json @@ -58,7 +58,7 @@ "version": "latest", "repo": "Azure/apiops", "binaryNames": "extractor", - "assetRegex": "^extractor\\.linux-.+\\.exe$", + "assetRegex": "^extractor-linux-.+\\.zip$", "additionalFlags": "--no-filter-assets-by-platform" } } From 37b4134af212ead6c90290267281524b381bff9a Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Wed, 9 Oct 2024 17:42:55 +0000 Subject: [PATCH 35/38] fix(age): add additional asset regex --- {archive/src => src}/age/README.md | 0 {archive/src => src}/age/devcontainer-feature.json | 0 {archive/src => src}/age/install.sh | 7 +------ {archive/src => src}/age/library_scripts.sh | 0 {archive/test => test}/age/scenarios.json | 0 {archive/test => test}/age/test_defaults_debian.sh | 0 6 files changed, 1 insertion(+), 6 deletions(-) rename {archive/src => src}/age/README.md (100%) rename {archive/src => src}/age/devcontainer-feature.json (100%) rename {archive/src => src}/age/install.sh (91%) rename {archive/src => src}/age/library_scripts.sh (100%) rename {archive/test => test}/age/scenarios.json (100%) rename {archive/test => test}/age/test_defaults_debian.sh (100%) diff --git a/archive/src/age/README.md b/src/age/README.md similarity index 100% rename from archive/src/age/README.md rename to src/age/README.md diff --git a/archive/src/age/devcontainer-feature.json b/src/age/devcontainer-feature.json similarity index 100% rename 
from archive/src/age/devcontainer-feature.json rename to src/age/devcontainer-feature.json diff --git a/archive/src/age/install.sh b/src/age/install.sh similarity index 91% rename from archive/src/age/install.sh rename to src/age/install.sh index 629d515c8..f58ed7613 100755 --- a/archive/src/age/install.sh +++ b/src/age/install.sh @@ -1,4 +1,3 @@ - set -e . ./library_scripts.sh @@ -10,14 +9,10 @@ set -e # of the script ensure_nanolayer nanolayer_location "v0.5.4" - $nanolayer_location \ install \ devcontainer-feature \ "ghcr.io/devcontainers-contrib/features/gh-release:1.0.23" \ - --option repo='filosottile/age' --option binaryNames='age' --option version="$VERSION" - - + --option repo='filosottile/age' --option binaryNames='age' --option version="$VERSION" --option assetRegex='.*(.tar.gz)$' echo 'Done!' - diff --git a/archive/src/age/library_scripts.sh b/src/age/library_scripts.sh similarity index 100% rename from archive/src/age/library_scripts.sh rename to src/age/library_scripts.sh diff --git a/archive/test/age/scenarios.json b/test/age/scenarios.json similarity index 100% rename from archive/test/age/scenarios.json rename to test/age/scenarios.json diff --git a/archive/test/age/test_defaults_debian.sh b/test/age/test_defaults_debian.sh similarity index 100% rename from archive/test/age/test_defaults_debian.sh rename to test/age/test_defaults_debian.sh From 38f904ded7db87a28b6207806fba078d94ff4ec7 Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Sun, 13 Oct 2024 08:25:16 +0000 Subject: [PATCH 36/38] chore: add just command for creating feature from template --- justfile | 9 +- templates/src/devcontainer-feature.json | 18 +++ templates/src/install.sh | 21 +++ templates/src/library_scripts.sh | 173 ++++++++++++++++++++++++ templates/test/scenarios.json | 16 +++ templates/test/test.sh | 9 ++ templates/test/test_debian.sh | 9 ++ templates/test/test_specific_version.sh | 9 ++ 8 files changed, 263 insertions(+), 1 deletion(-) create mode 100644 templates/src/devcontainer-feature.json create mode 100755 templates/src/install.sh create mode 100644 templates/src/library_scripts.sh create mode 100644 templates/test/scenarios.json create mode 100755 templates/test/test.sh create mode 100755 templates/test/test_debian.sh create mode 100755 templates/test/test_specific_version.sh diff --git a/justfile b/justfile index 22fe23345..ca074cf21 100644 --- a/justfile +++ b/justfile @@ -8,4 +8,11 @@ test-scenario feature-name scenario-filter: unarchive feature-name: mv archive/src/{{feature-name}} src/ - mv archive/test/{{feature-name}} test/ \ No newline at end of file + mv archive/test/{{feature-name}} test/ + +add feature-name: + mkdir -p src/{{feature-name}} test/{{feature-name}} + cp templates/src/* src/{{feature-name}}/ + cp templates/test/* test/{{feature-name}}/ + sed -i "s//{{feature-name}}/g" src/{{feature-name}}/* + sed -i "s//{{feature-name}}/g" test/{{feature-name}}/* \ No newline at end of file diff --git a/templates/src/devcontainer-feature.json b/templates/src/devcontainer-feature.json new file mode 100644 index 000000000..07b2de27d --- /dev/null +++ b/templates/src/devcontainer-feature.json @@ -0,0 +1,18 @@ +{ + "id": "", + "version": "1.0.0", + "name": " (via )", + "documentationURL": "http://github.com/devcontainers-extra/features/tree/main/src/", + "description": "", + "options": { + "version": { + "default": "latest", + "description": "Select the version to install.", + "proposals": [ + "latest" + ], + "type": "string" + } + }, + 
"installsAfter": [] +} \ No newline at end of file diff --git a/templates/src/install.sh b/templates/src/install.sh new file mode 100755 index 000000000..ed1b14622 --- /dev/null +++ b/templates/src/install.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +set -e + +source ./library_scripts.sh + +# nanolayer is a cli utility which keeps container layers as small as possible +# source code: https://github.com/devcontainers-contrib/nanolayer +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end +# of the script +ensure_nanolayer nanolayer_location "v0.5.6" + +# Example nanolayer installation via devcontainer-feature +$nanolayer_location \ + install \ + devcontainer-feature \ + "ghcr.io/devcontainers-extra/features/gh-release:1" \ + --option repo='cli/cli' --option binaryNames='gh' --option version="$VERSION" + +echo 'Done!' diff --git a/templates/src/library_scripts.sh b/templates/src/library_scripts.sh new file mode 100644 index 000000000..496144c47 --- /dev/null +++ b/templates/src/library_scripts.sh @@ -0,0 +1,173 @@ +#!/usr/bin/env bash + +clean_download() { + # The purpose of this function is to download a file with minimal impact on container layer size + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to + # 1. uninstall the downloader at the return of the function + # 2. revert back any changes to the package installer database/cache (for example apt-get lists) + # The above steps will minimize the leftovers being created while installing the downloader + # Supported distros: + # debian/ubuntu/alpine + + url=$1 + output_location=$2 + tempdir=$(mktemp -d) + downloader_installed="" + + function _apt_get_install() { + tempdir=$1 + + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir + apt-get update -y + apt-get -y install --no-install-recommends wget ca-certificates + } + + function _apt_get_cleanup() { + tempdir=$1 + + echo "removing wget" + apt-get -y purge wget --auto-remove + + echo "revert back apt lists" + rm -rf /var/lib/apt/lists/* + rm -r /var/lib/apt/lists && mv $tempdir/lists /var/lib/apt/lists + } + + function _apk_install() { + tempdir=$1 + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir + + apk add --no-cache wget + } + + function _apk_cleanup() { + tempdir=$1 + + echo "removing wget" + apk del wget + } + # try to use either wget or curl if one of them already installer + if type curl >/dev/null 2>&1; then + downloader=curl + elif type wget >/dev/null 2>&1; then + downloader=wget + else + downloader="" + fi + + # in case none of them is installed, install wget temporarly + if [ -z $downloader ]; then + if [ -x "/usr/bin/apt-get" ]; then + _apt_get_install $tempdir + elif [ -x "/sbin/apk" ]; then + _apk_install $tempdir + else + echo "distro not supported" + exit 1 + fi + downloader="wget" + downloader_installed="true" + fi + + if [ $downloader = "wget" ]; then + wget -q $url -O $output_location + else + curl -sfL $url -o $output_location + fi + + # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because + # alpine lack bash, and RETURN is not a valid signal under sh shell + if ! 
[ -z $downloader_installed ]; then + if [ -x "/usr/bin/apt-get" ]; then + _apt_get_cleanup $tempdir + elif [ -x "/sbin/apk" ]; then + _apk_cleanup $tempdir + else + echo "distro not supported" + exit 1 + fi + fi + +} + +ensure_nanolayer() { + # Ensure existance of the nanolayer cli program + local variable_name=$1 + + local required_version=$2 + # normalize version + if ! [[ $required_version == v* ]]; then + required_version=v$required_version + fi + + local nanolayer_location="" + + # If possible - try to use an already installed nanolayer + if [[ -z "${NANOLAYER_FORCE_CLI_INSTALLATION}" ]]; then + if [[ -z "${NANOLAYER_CLI_LOCATION}" ]]; then + if type nanolayer >/dev/null 2>&1; then + echo "Found a pre-existing nanolayer in PATH" + nanolayer_location=nanolayer + fi + elif [ -f "${NANOLAYER_CLI_LOCATION}" ] && [ -x "${NANOLAYER_CLI_LOCATION}" ]; then + nanolayer_location=${NANOLAYER_CLI_LOCATION} + echo "Found a pre-existing nanolayer which were given in env variable: $nanolayer_location" + fi + + # make sure its of the required version + if ! [[ -z "${nanolayer_location}" ]]; then + local current_version + current_version=$($nanolayer_location --version) + if ! [[ $current_version == v* ]]; then + current_version=v$current_version + fi + + if ! [ $current_version == $required_version ]; then + echo "skipping usage of pre-existing nanolayer. (required version $required_version does not match existing version $current_version)" + nanolayer_location="" + fi + fi + + fi + + # If not previuse installation found, download it temporarly and delete at the end of the script + if [[ -z "${nanolayer_location}" ]]; then + + if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then + tmp_dir=$(mktemp -d -t nanolayer-XXXXXXXXXX) + + clean_up() { + ARG=$? 
+ rm -rf $tmp_dir + exit $ARG + } + trap clean_up EXIT + + if [ -x "/sbin/apk" ]; then + clib_type=musl + else + clib_type=gnu + fi + + tar_filename=nanolayer-"$(uname -m)"-unknown-linux-$clib_type.tgz + + # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed + clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename + + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" + chmod a+x $tmp_dir/nanolayer + nanolayer_location=$tmp_dir/nanolayer + + else + echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" + exit 1 + fi + fi + + # Expose outside the resolved location + declare -g ${variable_name}=$nanolayer_location + +} diff --git a/templates/test/scenarios.json b/templates/test/scenarios.json new file mode 100644 index 000000000..48ca39c54 --- /dev/null +++ b/templates/test/scenarios.json @@ -0,0 +1,16 @@ +{ + "test_debian": { + "image": "mcr.microsoft.com/devcontainers/base:debian", + "features": { + "": {} + } + }, + "test_specific_version": { + "image": "mcr.microsoft.com/devcontainers/base:debian", + "features": { + "": { + "version": "x.y.z" + } + } + } +} \ No newline at end of file diff --git a/templates/test/test.sh b/templates/test/test.sh new file mode 100755 index 000000000..b2873a73c --- /dev/null +++ b/templates/test/test.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -e + +source dev-container-features-test-lib + +check "something is installed" something --version + +reportResults diff --git a/templates/test/test_debian.sh b/templates/test/test_debian.sh new file mode 100755 index 000000000..b2873a73c --- /dev/null +++ b/templates/test/test_debian.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -e + +source dev-container-features-test-lib + +check "something is installed" something --version + +reportResults diff --git a/templates/test/test_specific_version.sh b/templates/test/test_specific_version.sh new file mode 100755 index 000000000..e4858fa21 --- /dev/null +++ b/templates/test/test_specific_version.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -e + +source dev-container-features-test-lib + +check "something version is equal to x.y.z" sh -c "something --version | grep 'x.y.z'" + +reportResults From ab15c304412eed7344f92c386e7a507f11f2e522 Mon Sep 17 00:00:00 2001 From: koralowiec <36413794+koralowiec@users.noreply.github.com> Date: Sun, 13 Oct 2024 08:27:28 +0000 Subject: [PATCH 37/38] feat: add aws-sso-cli feature --- src/aws-sso-cli/devcontainer-feature.json | 18 +++ src/aws-sso-cli/install.sh | 21 +++ src/aws-sso-cli/library_scripts.sh | 173 ++++++++++++++++++++++ test/aws-sso-cli/scenarios.json | 16 ++ test/aws-sso-cli/test.sh | 9 ++ test/aws-sso-cli/test_debian.sh | 9 ++ test/aws-sso-cli/test_specific_version.sh | 9 ++ 7 files changed, 255 insertions(+) create mode 100644 src/aws-sso-cli/devcontainer-feature.json create mode 100755 src/aws-sso-cli/install.sh create mode 100644 src/aws-sso-cli/library_scripts.sh create mode 100644 test/aws-sso-cli/scenarios.json create mode 100755 test/aws-sso-cli/test.sh create mode 100755 test/aws-sso-cli/test_debian.sh create mode 100755 test/aws-sso-cli/test_specific_version.sh diff --git a/src/aws-sso-cli/devcontainer-feature.json b/src/aws-sso-cli/devcontainer-feature.json new file mode 100644 index 000000000..359b99a4d --- /dev/null +++ b/src/aws-sso-cli/devcontainer-feature.json @@ -0,0 +1,18 @@ +{ + "id": "aws-sso-cli", + "version": "1.0.0", + "name": "aws-sso-cli (via Github 
Releases)", + "documentationURL": "http://github.com/devcontainers-extra/features/tree/main/src/aws-sso-cli", + "description": "", + "options": { + "version": { + "default": "latest", + "description": "Select the version to install.", + "proposals": [ + "latest" + ], + "type": "string" + } + }, + "installsAfter": [] +} \ No newline at end of file diff --git a/src/aws-sso-cli/install.sh b/src/aws-sso-cli/install.sh new file mode 100755 index 000000000..9b60b21b1 --- /dev/null +++ b/src/aws-sso-cli/install.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +set -e + +source ./library_scripts.sh + +# nanolayer is a cli utility which keeps container layers as small as possible +# source code: https://github.com/devcontainers-contrib/nanolayer +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end +# of the script +ensure_nanolayer nanolayer_location "v0.5.6" + +$nanolayer_location \ + install \ + devcontainer-feature \ + "ghcr.io/devcontainers-extra/features/gh-release:1" \ + --option repo='synfinatic/aws-sso-cli' --option binaryNames='aws-sso' \ + --option version="$VERSION" --option releaseTagRegex='^(?!.*beta).*$' + +echo 'Done!' diff --git a/src/aws-sso-cli/library_scripts.sh b/src/aws-sso-cli/library_scripts.sh new file mode 100644 index 000000000..496144c47 --- /dev/null +++ b/src/aws-sso-cli/library_scripts.sh @@ -0,0 +1,173 @@ +#!/usr/bin/env bash + +clean_download() { + # The purpose of this function is to download a file with minimal impact on container layer size + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to + # 1. uninstall the downloader at the return of the function + # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) + # The above steps will minimize the leftovers being created while installing the downloader + # Supported distros: + # debian/ubuntu/alpine + + url=$1 + output_location=$2 + tempdir=$(mktemp -d) + downloader_installed="" + + function _apt_get_install() { + tempdir=$1 + + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir + apt-get update -y + apt-get -y install --no-install-recommends wget ca-certificates + } + + function _apt_get_cleanup() { + tempdir=$1 + + echo "removing wget" + apt-get -y purge wget --auto-remove + + echo "revert back apt lists" + rm -rf /var/lib/apt/lists/* + rm -r /var/lib/apt/lists && mv $tempdir/lists /var/lib/apt/lists + } + + function _apk_install() { + tempdir=$1 + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir + + apk add --no-cache wget + } + + function _apk_cleanup() { + tempdir=$1 + + echo "removing wget" + apk del wget + } + # try to use either wget or curl if one of them already installer + if type curl >/dev/null 2>&1; then + downloader=curl + elif type wget >/dev/null 2>&1; then + downloader=wget + else + downloader="" + fi + + # in case none of them is installed, install wget temporarly + if [ -z $downloader ]; then + if [ -x "/usr/bin/apt-get" ]; then + _apt_get_install $tempdir + elif [ -x "/sbin/apk" ]; then + _apk_install $tempdir + else + echo "distro not supported" + exit 1 + fi + downloader="wget" + downloader_installed="true" + fi + + if [ $downloader = "wget" ]; then + wget -q $url -O $output_location + else + curl -sfL $url -o $output_location + fi + + # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because + # alpine lack bash, and RETURN is not a valid signal under sh shell + if ! [ -z $downloader_installed ]; then + if [ -x "/usr/bin/apt-get" ]; then + _apt_get_cleanup $tempdir + elif [ -x "/sbin/apk" ]; then + _apk_cleanup $tempdir + else + echo "distro not supported" + exit 1 + fi + fi + +} + +ensure_nanolayer() { + # Ensure existance of the nanolayer cli program + local variable_name=$1 + + local required_version=$2 + # normalize version + if ! [[ $required_version == v* ]]; then + required_version=v$required_version + fi + + local nanolayer_location="" + + # If possible - try to use an already installed nanolayer + if [[ -z "${NANOLAYER_FORCE_CLI_INSTALLATION}" ]]; then + if [[ -z "${NANOLAYER_CLI_LOCATION}" ]]; then + if type nanolayer >/dev/null 2>&1; then + echo "Found a pre-existing nanolayer in PATH" + nanolayer_location=nanolayer + fi + elif [ -f "${NANOLAYER_CLI_LOCATION}" ] && [ -x "${NANOLAYER_CLI_LOCATION}" ]; then + nanolayer_location=${NANOLAYER_CLI_LOCATION} + echo "Found a pre-existing nanolayer which were given in env variable: $nanolayer_location" + fi + + # make sure its of the required version + if ! [[ -z "${nanolayer_location}" ]]; then + local current_version + current_version=$($nanolayer_location --version) + if ! [[ $current_version == v* ]]; then + current_version=v$current_version + fi + + if ! [ $current_version == $required_version ]; then + echo "skipping usage of pre-existing nanolayer. 
(required version $required_version does not match existing version $current_version)" + nanolayer_location="" + fi + fi + + fi + + # If not previuse installation found, download it temporarly and delete at the end of the script + if [[ -z "${nanolayer_location}" ]]; then + + if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then + tmp_dir=$(mktemp -d -t nanolayer-XXXXXXXXXX) + + clean_up() { + ARG=$? + rm -rf $tmp_dir + exit $ARG + } + trap clean_up EXIT + + if [ -x "/sbin/apk" ]; then + clib_type=musl + else + clib_type=gnu + fi + + tar_filename=nanolayer-"$(uname -m)"-unknown-linux-$clib_type.tgz + + # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed + clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename + + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" + chmod a+x $tmp_dir/nanolayer + nanolayer_location=$tmp_dir/nanolayer + + else + echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" + exit 1 + fi + fi + + # Expose outside the resolved location + declare -g ${variable_name}=$nanolayer_location + +} diff --git a/test/aws-sso-cli/scenarios.json b/test/aws-sso-cli/scenarios.json new file mode 100644 index 000000000..0cbfa6187 --- /dev/null +++ b/test/aws-sso-cli/scenarios.json @@ -0,0 +1,16 @@ +{ + "test_debian": { + "image": "mcr.microsoft.com/devcontainers/base:debian", + "features": { + "aws-sso-cli": {} + } + }, + "test_specific_version": { + "image": "mcr.microsoft.com/devcontainers/base:debian", + "features": { + "aws-sso-cli": { + "version": "1.16.1" + } + } + } +} \ No newline at end of file diff --git a/test/aws-sso-cli/test.sh b/test/aws-sso-cli/test.sh new file mode 100755 index 000000000..8a4eed5eb --- /dev/null +++ b/test/aws-sso-cli/test.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -e + +source dev-container-features-test-lib + +check "aws-sso is installed" aws-sso version + +reportResults diff --git a/test/aws-sso-cli/test_debian.sh b/test/aws-sso-cli/test_debian.sh new file mode 100755 index 000000000..8a4eed5eb --- /dev/null +++ b/test/aws-sso-cli/test_debian.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -e + +source dev-container-features-test-lib + +check "aws-sso is installed" aws-sso version + +reportResults diff --git a/test/aws-sso-cli/test_specific_version.sh b/test/aws-sso-cli/test_specific_version.sh new file mode 100755 index 000000000..24589417e --- /dev/null +++ b/test/aws-sso-cli/test_specific_version.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -e + +source dev-container-features-test-lib + +check "aws-sso version is equal to 1.16.1" sh -c "aws-sso version | grep '1.16.1'" + +reportResults From ef2f6a18d02f91a9c478be3948f06fb2802b64e4 Mon Sep 17 00:00:00 2001 From: Tom Plant Date: Tue, 15 Oct 2024 08:12:04 +0000 Subject: [PATCH 38/38] feat: add Namespace CLI via Github Releases Signed-off-by: Tom Plant --- src/namespace-cli/README.md | 17 ++ src/namespace-cli/devcontainer-feature.json | 20 +++ src/namespace-cli/install.sh | 21 +++ src/namespace-cli/library_scripts.sh | 173 ++++++++++++++++++++ test/namespace-cli/scenarios.json | 8 + test/namespace-cli/test.sh | 10 ++ 6 files changed, 249 insertions(+) create mode 100644 src/namespace-cli/README.md create mode 100644 src/namespace-cli/devcontainer-feature.json create mode 100644 src/namespace-cli/install.sh create mode 100644 src/namespace-cli/library_scripts.sh create mode 100644 test/namespace-cli/scenarios.json 
create mode 100644 test/namespace-cli/test.sh diff --git a/src/namespace-cli/README.md b/src/namespace-cli/README.md new file mode 100644 index 000000000..5f78dafcb --- /dev/null +++ b/src/namespace-cli/README.md @@ -0,0 +1,17 @@ +# Namespace CLI (via Github Releases) + +The CLI for [Namespace](https://namespace.so). + +## Example DevContainer Usage + +```json +"features": { + "ghcr.io/devcontainers-contrib/features/namespace-cli:1": {} +} +``` + +## Options + +| Options Id | Description | Type | Default Value | +|-----|-----|-----|-----| +| version | Select the version to install. | string | latest | diff --git a/src/namespace-cli/devcontainer-feature.json b/src/namespace-cli/devcontainer-feature.json new file mode 100644 index 000000000..09cec1df8 --- /dev/null +++ b/src/namespace-cli/devcontainer-feature.json @@ -0,0 +1,20 @@ +{ + "id": "namespace-cli", + "version": "1.0.0", + "name": "Namespace CLI (via Github Releases)", + "documentationURL": "http://github.com/devcontainers-extra/features/tree/main/src/namespace-cli", + "description": "The CLI for https://namespace.so", + "options": { + "version": { + "default": "latest", + "description": "Select the version to install.", + "proposals": [ + "latest" + ], + "type": "string" + } + }, + "installsAfter": [ + "ghcr.io/devcontainers-extra/features/gh-release" + ] +} \ No newline at end of file diff --git a/src/namespace-cli/install.sh b/src/namespace-cli/install.sh new file mode 100644 index 000000000..211ab8380 --- /dev/null +++ b/src/namespace-cli/install.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +set -e + +source ./library_scripts.sh + +# nanolayer is a cli utility which keeps container layers as small as possible +# source code: https://github.com/devcontainers-contrib/nanolayer +# `ensure_nanolayer` is a bash function that will find any existing nanolayer installations, +# and if missing - will download a temporary copy that automatically get deleted at the end +# of the script +ensure_nanolayer nanolayer_location "v0.5.6" + +# Example nanolayer installation via devcontainer-feature +$nanolayer_location \ + install \ + devcontainer-feature \ + "ghcr.io/devcontainers-extra/features/gh-release:1" \ + --option repo='namespacelabs/foundation' --option binaryNames='nsc,docker-credential-nsc' --option version="$VERSION" --option libName='docker-credential-nsc' + +echo 'Done!' \ No newline at end of file diff --git a/src/namespace-cli/library_scripts.sh b/src/namespace-cli/library_scripts.sh new file mode 100644 index 000000000..940b88651 --- /dev/null +++ b/src/namespace-cli/library_scripts.sh @@ -0,0 +1,173 @@ +#!/usr/bin/env bash + +clean_download() { + # The purpose of this function is to download a file with minimal impact on container layer size + # this means if no valid downloader is found (curl or wget) then we install a downloader (currently wget) in a + # temporary manner, and making sure to + # 1. uninstall the downloader at the return of the function + # 2. 
revert back any changes to the package installer database/cache (for example apt-get lists) + # The above steps will minimize the leftovers being created while installing the downloader + # Supported distros: + # debian/ubuntu/alpine + + url=$1 + output_location=$2 + tempdir=$(mktemp -d) + downloader_installed="" + + function _apt_get_install() { + tempdir=$1 + + # copy current state of apt list - in order to revert back later (minimize contianer layer size) + cp -p -R /var/lib/apt/lists $tempdir + apt-get update -y + apt-get -y install --no-install-recommends wget ca-certificates + } + + function _apt_get_cleanup() { + tempdir=$1 + + echo "removing wget" + apt-get -y purge wget --auto-remove + + echo "revert back apt lists" + rm -rf /var/lib/apt/lists/* + rm -r /var/lib/apt/lists && mv $tempdir/lists /var/lib/apt/lists + } + + function _apk_install() { + tempdir=$1 + # copy current state of apk cache - in order to revert back later (minimize contianer layer size) + cp -p -R /var/cache/apk $tempdir + + apk add --no-cache wget + } + + function _apk_cleanup() { + tempdir=$1 + + echo "removing wget" + apk del wget + } + # try to use either wget or curl if one of them already installer + if type curl >/dev/null 2>&1; then + downloader=curl + elif type wget >/dev/null 2>&1; then + downloader=wget + else + downloader="" + fi + + # in case none of them is installed, install wget temporarly + if [ -z $downloader ]; then + if [ -x "/usr/bin/apt-get" ]; then + _apt_get_install $tempdir + elif [ -x "/sbin/apk" ]; then + _apk_install $tempdir + else + echo "distro not supported" + exit 1 + fi + downloader="wget" + downloader_installed="true" + fi + + if [ $downloader = "wget" ]; then + wget -q $url -O $output_location + else + curl -sfL $url -o $output_location + fi + + # NOTE: the cleanup procedure was not implemented using `trap X RETURN` only because + # alpine lack bash, and RETURN is not a valid signal under sh shell + if ! [ -z $downloader_installed ]; then + if [ -x "/usr/bin/apt-get" ]; then + _apt_get_cleanup $tempdir + elif [ -x "/sbin/apk" ]; then + _apk_cleanup $tempdir + else + echo "distro not supported" + exit 1 + fi + fi + +} + +ensure_nanolayer() { + # Ensure existance of the nanolayer cli program + local variable_name=$1 + + local required_version=$2 + # normalize version + if ! [[ $required_version == v* ]]; then + required_version=v$required_version + fi + + local nanolayer_location="" + + # If possible - try to use an already installed nanolayer + if [[ -z "${NANOLAYER_FORCE_CLI_INSTALLATION}" ]]; then + if [[ -z "${NANOLAYER_CLI_LOCATION}" ]]; then + if type nanolayer >/dev/null 2>&1; then + echo "Found a pre-existing nanolayer in PATH" + nanolayer_location=nanolayer + fi + elif [ -f "${NANOLAYER_CLI_LOCATION}" ] && [ -x "${NANOLAYER_CLI_LOCATION}" ]; then + nanolayer_location=${NANOLAYER_CLI_LOCATION} + echo "Found a pre-existing nanolayer which were given in env variable: $nanolayer_location" + fi + + # make sure its of the required version + if ! [[ -z "${nanolayer_location}" ]]; then + local current_version + current_version=$($nanolayer_location --version) + if ! [[ $current_version == v* ]]; then + current_version=v$current_version + fi + + if ! [ $current_version == $required_version ]; then + echo "skipping usage of pre-existing nanolayer. 
(required version $required_version does not match existing version $current_version)" + nanolayer_location="" + fi + fi + + fi + + # If not previuse installation found, download it temporarly and delete at the end of the script + if [[ -z "${nanolayer_location}" ]]; then + + if [ "$(uname -sm)" == "Linux x86_64" ] || [ "$(uname -sm)" == "Linux aarch64" ]; then + tmp_dir=$(mktemp -d -t nanolayer-XXXXXXXXXX) + + clean_up() { + ARG=$? + rm -rf $tmp_dir + exit $ARG + } + trap clean_up EXIT + + if [ -x "/sbin/apk" ]; then + clib_type=musl + else + clib_type=gnu + fi + + tar_filename=nanolayer-"$(uname -m)"-unknown-linux-$clib_type.tgz + + # clean download will minimize leftover in case a downloaderlike wget or curl need to be installed + clean_download https://github.com/devcontainers-contrib/cli/releases/download/$required_version/$tar_filename $tmp_dir/$tar_filename + + tar xfzv $tmp_dir/$tar_filename -C "$tmp_dir" + chmod a+x $tmp_dir/nanolayer + nanolayer_location=$tmp_dir/nanolayer + + else + echo "No binaries compiled for non-x86-linux architectures yet: $(uname -m)" + exit 1 + fi + fi + + # Expose outside the resolved location + declare -g ${variable_name}=$nanolayer_location + +} \ No newline at end of file diff --git a/test/namespace-cli/scenarios.json b/test/namespace-cli/scenarios.json new file mode 100644 index 000000000..ae3b82f50 --- /dev/null +++ b/test/namespace-cli/scenarios.json @@ -0,0 +1,8 @@ +{ + "test": { + "image": "mcr.microsoft.com/devcontainers/base:debian", + "features": { + "namespace-cli": {} + } + } +} diff --git a/test/namespace-cli/test.sh b/test/namespace-cli/test.sh new file mode 100644 index 000000000..1f435061f --- /dev/null +++ b/test/namespace-cli/test.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +set -e + +source dev-container-features-test-lib + +check "nsc version" nsc version +check "docker-credential-nsc --help" docker-credential-nsc --help + +reportResults \ No newline at end of file
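
For reference, the two features introduced by the last two patches would be consumed from a `devcontainer.json` in the same way as the existing features in this repository. A minimal sketch follows; the `ghcr.io/devcontainers-contrib/features/...` registry path mirrors the namespace-cli README above and is only an assumption for `aws-sso-cli`, which ships no README in this series, and the pinned `1.16.1` version is taken from its `test_specific_version` scenario:

```json
{
    "image": "mcr.microsoft.com/devcontainers/base:debian",
    "features": {
        // Registry path assumed to match the namespace-cli README; adjust if the features are published under a different namespace.
        "ghcr.io/devcontainers-contrib/features/aws-sso-cli:1": {
            "version": "1.16.1"
        },
        "ghcr.io/devcontainers-contrib/features/namespace-cli:1": {}
    }
}
```

Both installers delegate to the `gh-release` feature via nanolayer, so the resulting binaries (`aws-sso`, `nsc`, `docker-credential-nsc`) are fetched from the projects' GitHub releases rather than built inside the container.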