diff --git a/.bldr.toml b/.bldr.toml index ff97e185f4..ea4974bfb6 100644 --- a/.bldr.toml +++ b/.bldr.toml @@ -59,15 +59,6 @@ paths = [ "components/hab/*", ] -[hab-pkg-cfize] -plan_path = "components/pkg-cfize" -paths = [ - "components/pkg-export-container/*" -] - -[hab-pkg-mesosize] -plan_path = "components/pkg-mesosize" - [simple-hooks] plan_path = "test/fixtures/plans/simple-hooks" diff --git a/.cargo/audit.toml b/.cargo/audit.toml index f3faf64dd1..57b276e722 100644 --- a/.cargo/audit.toml +++ b/.cargo/audit.toml @@ -20,6 +20,8 @@ ignore = [ "RUSTSEC-2022-0071", # Unmaintained: rusoto "RUSTSEC-2024-0370", # Unmaintained: proc-macro-error (used by structopt) "RUSTSEC-2024-0375", # Unmaintained: atty (used by clap v2) + "RUSTSEC-2024-0384", # Unmaintained: instant (used by notify) + "RUSTSEC-2024-0388", # Unmaintained: derivative (used by log4rs) ] informational_warnings = [ "notice", diff --git a/.expeditor/config.yml b/.expeditor/config.yml index 9750036476..7499545756 100644 --- a/.expeditor/config.yml +++ b/.expeditor/config.yml @@ -33,6 +33,12 @@ pipelines: - finish_release: description: Tasks to be performed after a release has been promoted to stable +release_branches: + - main: + version_constraint: 2.* + - v1.6: + version_constraint: 1.* + staging_areas: - release_staging: workload: pull_request_merged:{{github_repo}}:{{release_branch}}:* diff --git a/.expeditor/release_habitat.pipeline.yml b/.expeditor/release_habitat.pipeline.yml index d121e12111..a2c31bec41 100644 --- a/.expeditor/release_habitat.pipeline.yml +++ b/.expeditor/release_habitat.pipeline.yml @@ -22,12 +22,6 @@ steps: # Release! ####################################################################### - - label: "[:linux: build hab (ARM)]" - command: - - .expeditor/scripts/release_habitat/package_and_upload_arm_binary.sh - agents: - queue: docker-linux-arm64 - - label: "[:linux: build hab]" command: - .expeditor/scripts/release_habitat/build_component.sh hab @@ -38,6 +32,15 @@ steps: environment: - BUILD_PKG_TARGET=x86_64-linux + - label: "[:linux: build aarch64 hab]" + agents: + queue: default-privileged-aarch64 + env: + HAB_FALLBACK_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - sudo -E .expeditor/scripts/release_habitat/build_component.sh hab + - label: "[:linux: :two: build hab]" command: - .expeditor/scripts/release_habitat/build_component.sh hab @@ -111,6 +114,15 @@ steps: environment: - BUILD_PKG_TARGET=x86_64-linux + - label: "[:linux: build aarch64 hab-plan-build]" + agents: + queue: default-privileged-aarch64 + env: + HAB_STUDIO_SECRET_HAB_FALLBACK_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - sudo -E .expeditor/scripts/release_habitat/build_component.sh plan-build + - label: "[:linux: :two: build hab-plan-build]" command: - .expeditor/scripts/release_habitat/build_component.sh plan-build @@ -146,6 +158,15 @@ steps: environment: - BUILD_PKG_TARGET=x86_64-linux + - label: "[:linux: build aarch64 hab-backline]" + agents: + queue: default-privileged-aarch64 + env: + HAB_STUDIO_SECRET_HAB_FALLBACK_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - sudo -E .expeditor/scripts/release_habitat/build_component.sh backline + - label: "[:linux: :two: build hab-backline]" command: - .expeditor/scripts/release_habitat/build_component.sh backline @@ -170,6 +191,15 @@ steps: environment: - BUILD_PKG_TARGET=x86_64-linux + - label: "[:linux: build aarch64 hab-studio]" + agents: + queue: default-privileged-aarch64 + env: + 
HAB_STUDIO_SECRET_HAB_FALLBACK_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - sudo -E .expeditor/scripts/release_habitat/build_component.sh studio + - label: "[:linux: :two: build hab-studio]" command: - .expeditor/scripts/release_habitat/build_component.sh studio @@ -208,6 +238,15 @@ steps: environment: - BUILD_PKG_TARGET=x86_64-linux + - label: "[:linux: build aarch64 launcher]" + agents: + queue: default-privileged-aarch64 + env: + HAB_STUDIO_SECRET_HAB_FALLBACK_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - sudo -E .expeditor/scripts/release_habitat/build_component.sh launcher + - label: "[:linux: :two: build launcher]" command: - .expeditor/scripts/release_habitat/build_component.sh launcher @@ -241,6 +280,15 @@ steps: environment: - BUILD_PKG_TARGET=x86_64-linux + - label: "[:linux: build aarch64 hab-sup]" + agents: + queue: default-privileged-aarch64 + env: + HAB_STUDIO_SECRET_HAB_FALLBACK_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - sudo -E .expeditor/scripts/release_habitat/build_component.sh sup + - label: "[:linux: :two: build hab-sup]" command: - .expeditor/scripts/release_habitat/build_component.sh sup @@ -274,6 +322,15 @@ steps: environment: - BUILD_PKG_TARGET=x86_64-linux + - label: "[:linux: build aarch64 hab-pkg-export-container]" + agents: + queue: default-privileged-aarch64 + env: + HAB_STUDIO_SECRET_HAB_FALLBACK_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - sudo -E .expeditor/scripts/release_habitat/build_component.sh pkg-export-container + - label: "[:windows: build hab-pkg-export-container]" command: - powershell .expeditor/scripts/release_habitat/build_component.ps1 pkg-export-container @@ -295,6 +352,15 @@ steps: environment: - BUILD_PKG_TARGET=x86_64-linux + - label: "[:linux: build aarch64 hab-pkg-export-tar]" + agents: + queue: default-privileged-aarch64 + env: + HAB_STUDIO_SECRET_HAB_FALLBACK_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - sudo -E .expeditor/scripts/release_habitat/build_component.sh pkg-export-tar + - label: "[:linux: :two: build hab-pkg-export-tar]" command: - .expeditor/scripts/release_habitat/build_component.sh pkg-export-tar @@ -352,6 +418,17 @@ steps: environment: - BUILD_PKG_TARGET=x86_64-linux + - label: "[:linux: upload hab aarch64 binary]" + command: + - .expeditor/scripts/release_habitat/package_and_upload_binary.sh + if: build.creator.name == 'Chef Expeditor' || build.env("UPLOAD_AND_PROMOTE") == 'true' + expeditor: + executor: + docker: + privileged: true + environment: + - BUILD_PKG_TARGET=aarch64-linux + - label: "[:linux: :two: upload hab binary]" command: - .expeditor/scripts/release_habitat/package_and_upload_binary.sh diff --git a/.expeditor/scripts/finish_release/sync_acceptance.toml b/.expeditor/scripts/finish_release/sync_acceptance.toml index a828667779..02371a042a 100644 --- a/.expeditor/scripts/finish_release/sync_acceptance.toml +++ b/.expeditor/scripts/finish_release/sync_acceptance.toml @@ -14,8 +14,6 @@ packages = [ "core/hab-backline", "core/hab-pkg-export-container", "core/hab-pkg-export-tar", - "core/hab-pkg-mesosize", - "core/hab-pkg-cfize" ] [[x86_64-linux-kernel2]] diff --git a/.expeditor/scripts/release_habitat/build_component.ps1 b/.expeditor/scripts/release_habitat/build_component.ps1 index c59a3bc77e..9d87d80a2a 100755 --- a/.expeditor/scripts/release_habitat/build_component.ps1 +++ b/.expeditor/scripts/release_habitat/build_component.ps1 @@ -52,6 +52,7 @@ git config --global --add 
safe.directory C:/workdir # we must ensure that we're pulling dependencies from our build # channel when applicable. $Env:HAB_BLDR_CHANNEL="$Channel" +$Env:HAB_STUDIO_SECRET_HAB_REFRESH_CHANNEL="$Channel" Invoke-Expression "$baseHabExe pkg build components\$Component --keys core" . results\last_build.ps1 diff --git a/.expeditor/scripts/release_habitat/build_component.sh b/.expeditor/scripts/release_habitat/build_component.sh index 4f28d0b3d4..68fc5049e9 100755 --- a/.expeditor/scripts/release_habitat/build_component.sh +++ b/.expeditor/scripts/release_habitat/build_component.sh @@ -33,6 +33,9 @@ ${hab_binary} studio rm echo "--- :habicat: Building components/${component} using ${hab_binary}" +# Set the refresh channel to the release channel. +export HAB_STUDIO_SECRET_HAB_REFRESH_CHANNEL="${channel}" + HAB_BLDR_CHANNEL="${channel}" ${hab_binary} pkg build "components/${component}" source results/last_build.env diff --git a/.expeditor/scripts/shared.sh b/.expeditor/scripts/shared.sh index 73af9f96cd..1857a26988 100755 --- a/.expeditor/scripts/shared.sh +++ b/.expeditor/scripts/shared.sh @@ -20,9 +20,10 @@ curlbash_hab() { sudo rm -rf /hab/pkgs/core/hab/0.82.0 || \ rm -rf /hab/pkgs/core/hab/0.82.0 fi + curl https://raw.githubusercontent.com/habitat-sh/habitat/main/components/hab/install.sh | sudo bash -s -- -t "$pkg_target" case "${pkg_target}" in - x86_64-linux | x86_64-linux-kernel2) + x86_64-linux | aarch64-linux | x86_64-linux-kernel2) hab_binary="/bin/hab" ;; x86_64-darwin) diff --git a/.expeditor/scripts/verify/build_package-aarch64.sh b/.expeditor/scripts/verify/build_package-aarch64.sh new file mode 100755 index 0000000000..d3e7c86647 --- /dev/null +++ b/.expeditor/scripts/verify/build_package-aarch64.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +set -eou pipefail + +package_path=${1?package_path argument required} + +# Install hab from a temporarily uploaded aarch64 package +curl https://raw.githubusercontent.com/habitat-sh/habitat/main/components/hab/install.sh | sudo bash -s -- -t "$BUILD_PKG_TARGET" + +# Since we are only verifying we don't have build failures, make everything +# temp! 
+export HAB_ORIGIN +HAB_ORIGIN=core +# let's make a selfcontained tempdir for this job +export JOB_TEMP_ROOT +JOB_TEMP_ROOT=$(mktemp -d /tmp/job-root-XXXXXX) +export HAB_CACHE_KEY_PATH +HAB_CACHE_KEY_PATH="$JOB_TEMP_ROOT/keys" + +echo "--- :key: Generating temporary origin key" +hab origin key generate "$HAB_ORIGIN" +echo "--- :hab: Running hab pkg build for $package_path" +hab pkg build "$package_path" diff --git a/.expeditor/scripts/verify/run_cargo_test-aarch64.sh b/.expeditor/scripts/verify/run_cargo_test-aarch64.sh new file mode 100755 index 0000000000..6f29b84fe2 --- /dev/null +++ b/.expeditor/scripts/verify/run_cargo_test-aarch64.sh @@ -0,0 +1,78 @@ +#!/bin/bash + +set -eou pipefail + +# Install hab from a temporarily uploaded aarch64 package +curl https://raw.githubusercontent.com/habitat-sh/habitat/main/components/hab/install.sh | sudo bash -s -- -t aarch64-linux + +# shellcheck source=.expeditor/scripts/shared.sh +source .expeditor/scripts/verify/shared.sh + +if [[ ${1:-"--"} = "--" ]]; then + scope="habitat workspace" +else + component="$1" + shift + scope="$component" +fi + +toolchain=$(get_toolchain) + +sudo -E hab pkg install core/zeromq +sudo -E hab pkg install core/protobuf +sudo -E hab pkg install core/rust/"$toolchain" +sudo -E hab pkg install core/xz +sudo -E hab pkg install core/coreutils +sudo -E hab pkg install core/openssl +sudo -E hab pkg install core/perl +sudo -E hab pkg install core/make + +export OPENSSL_DIR +OPENSSL_DIR="$(hab pkg path core/openssl)/bin" +export OPENSSL_LIB_DIR +OPENSSL_LIB_DIR="$(hab pkg path core/openssl)/lib" + +export LIBZMQ_PREFIX +LIBZMQ_PREFIX=$(hab pkg path core/zeromq) +# now include zeromq and gcc so they exist in the runtime library path when cargo test is run +export LD_LIBRARY_PATH +LD_LIBRARY_PATH="$(hab pkg path core/gcc-base)/lib:$(hab pkg path core/zeromq)/lib:$(hab pkg path core/xz)/lib:$(hab pkg path core/openssl)/lib" + +export LIBRARY_PATH +LIBRARY_PATH="$(hab pkg path core/xz)/lib" + +export PROTOC_NO_VENDOR=1 +export PROTOC +PROTOC=$(hab pkg path core/protobuf)/bin/protoc + +_oldPth=$PATH +_pth="$(hab pkg path core/coreutils)/bin:$(hab pkg path core/openssl)/bin:$(hab pkg path core/perl)/bin:$(hab pkg path core/make)/bin" +eval "$(hab pkg env core/rust/"$toolchain"):$PATH" +export PATH="$PATH:$_pth:$_oldPth" + +# Set testing filesystem root +export FS_ROOT +FS_ROOT=$(mktemp -d /tmp/testing-fs-root-XXXXXX) + +export RUST_BACKTRACE=1 + +# Build the all the hab binaries so that we can run integration tests +if [[ "$scope" == "sup" ]]; then + cargo build +fi + +echo "--- Running cargo test with scope '$scope' and args '$*'" + +if [[ -n ${component:-} ]]; then + cd "components/$component" +fi + +# We do not have any packages in the stable channel for aarch64 and probably never will. +# Set the HAB_INTERPRETER_IDENT to point to LTS-2024 to proceed with the tests. +export HAB_INTERPRETER_IDENT="core/busybox-static/1.36.1/20240805133911" + +# Always add `--quiet` to avoid the noise of compilation in test output. +# The invocation to this script can add `--format pretty` to the test runner +# args (that is, after --, like --nocapture and --test-threads) if the names +# of the tests being run is desired in the output. 
+cargo test --quiet "$@" diff --git a/.expeditor/verify.pipeline.yml b/.expeditor/verify.pipeline.yml index 47512bb5b3..26664305c7 100644 --- a/.expeditor/verify.pipeline.yml +++ b/.expeditor/verify.pipeline.yml @@ -4,20 +4,13 @@ expeditor: timeout_in_minutes: 60 env: HAB_BLDR_CHANNEL: "LTS-2024" + HAB_STUDIO_SECRET_HAB_REFRESH_CHANNEL: "LTS-2024" + HAB_REFRESH_CHANNEL: "LTS-2024" steps: ####################################################################### # Linting! ####################################################################### - - label: "[unit] :linux: ARM Platform Support" - command: - - .expeditor/scripts/verify/run_native_cargo_test.sh - agents: - queue: docker-linux-arm64 - plugins: - - docker#v3.13.0: - image: "rust:1.62.1-buster" - - label: "[lint] :linux: :bash: Shellcheck" command: - .expeditor/scripts/verify/shellcheck.sh @@ -111,6 +104,20 @@ steps: automatic: limit: 1 + - label: "[unit] :linux: aarch64 ignored" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - .expeditor/scripts/verify/run_cargo_test-aarch64.sh -- --ignored + timeout_in_minutes: 20 + retry: + automatic: + limit: 1 + - label: "[unit] :linux: builder-api-client" command: - .expeditor/scripts/verify/run_cargo_test.sh builder-api-client @@ -123,6 +130,20 @@ steps: automatic: limit: 1 + - label: "[unit] :linux: aarch64 builder-api-client" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - .expeditor/scripts/verify/run_cargo_test-aarch64.sh builder-api-client + timeout_in_minutes: 20 + retry: + automatic: + limit: 1 + - label: "[unit] :linux: butterfly lock_as_mutex" command: - .expeditor/scripts/verify/run_cargo_test.sh butterfly --features "lock_as_mutex" -- --test-threads=1 --format=pretty @@ -133,6 +154,18 @@ steps: timeout_in_minutes: 20 soft_fail: true + - label: "[unit] :linux: aarch64 butterfly lock_as_mutex" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - .expeditor/scripts/verify/run_cargo_test-aarch64.sh butterfly --features "lock_as_mutex" -- --test-threads=1 --format=pretty + timeout_in_minutes: 20 + soft_fail: true + - label: "[unit] :linux: butterfly lock_as_rwlock" command: - .expeditor/scripts/verify/run_cargo_test.sh butterfly --features "lock_as_rwlock" -- --test-threads=1 --format=pretty @@ -145,6 +178,20 @@ steps: automatic: limit: 1 + - label: "[unit] :linux: aarch64 butterfly lock_as_rwlock" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - .expeditor/scripts/verify/run_cargo_test-aarch64.sh butterfly --features "lock_as_rwlock" -- --test-threads=1 --format=pretty + timeout_in_minutes: 20 + retry: + automatic: + limit: 1 + - label: "[unit] :linux: common" command: - .expeditor/scripts/verify/run_cargo_test.sh common @@ -162,6 +209,20 @@ steps: automatic: limit: 1 + - label: "[unit] :linux: aarch64 common" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - .expeditor/scripts/verify/run_cargo_test-aarch64.sh common + timeout_in_minutes: 10 + retry: + automatic: + limit: 1 + - label: "[unit] 
:linux: core" command: - .expeditor/scripts/verify/run_cargo_test.sh core @@ -174,6 +235,20 @@ steps: automatic: limit: 1 + - label: "[unit] :linux: aarch64 core" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - .expeditor/scripts/verify/run_cargo_test-aarch64.sh core + timeout_in_minutes: 10 + retry: + automatic: + limit: 1 + - label: "[unit] :linux: core functional" command: - .expeditor/scripts/verify/run_cargo_test.sh core --features "functional" -- --format=pretty @@ -183,6 +258,17 @@ steps: privileged: true timeout_in_minutes: 20 + - label: "[unit] :linux: aarch64 core functional" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - .expeditor/scripts/verify/run_cargo_test-aarch64.sh core --features "functional" -- --format=pretty + timeout_in_minutes: 20 + - label: "[unit] :linux: http-client" command: - .expeditor/scripts/verify/run_cargo_test.sh http-client @@ -195,6 +281,20 @@ steps: automatic: limit: 1 + - label: "[unit] :linux: aarch64 http-client" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - .expeditor/scripts/verify/run_cargo_test-aarch64.sh http-client + timeout_in_minutes: 10 + retry: + automatic: + limit: 1 + - label: "[unit] :linux: hab" command: - .expeditor/scripts/verify/run_cargo_test.sh hab @@ -207,6 +307,20 @@ steps: automatic: limit: 1 + - label: "[unit] :linux: aarch64 hab" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - .expeditor/scripts/verify/run_cargo_test-aarch64.sh hab + timeout_in_minutes: 10 + retry: + automatic: + limit: 1 + - label: "[unit] :linux: launcher-client" command: - .expeditor/scripts/verify/run_cargo_test.sh launcher-client @@ -219,6 +333,20 @@ steps: automatic: limit: 1 + - label: "[unit] :linux: aarch64 launcher-client" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - .expeditor/scripts/verify/run_cargo_test-aarch64.sh launcher-client + timeout_in_minutes: 10 + retry: + automatic: + limit: 1 + - label: "[unit] :linux: launcher-protocol" command: - .expeditor/scripts/verify/run_cargo_test.sh launcher-protocol @@ -231,6 +359,20 @@ steps: automatic: limit: 1 + - label: "[unit] :linux: aarch64 launcher-protocol" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - .expeditor/scripts/verify/run_cargo_test-aarch64.sh launcher-protocol + timeout_in_minutes: 10 + retry: + automatic: + limit: 1 + - label: "[unit] :linux: pkg-export-container" command: - .expeditor/scripts/verify/run_cargo_test.sh pkg-export-container @@ -243,6 +385,20 @@ steps: automatic: limit: 1 + - label: "[unit] :linux: aarch64 pkg-export-container" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - .expeditor/scripts/verify/run_cargo_test-aarch64.sh pkg-export-container + timeout_in_minutes: 10 + retry: + automatic: + limit: 1 + - label: "[unit] 
:linux: pkg-export-tar" command: - .expeditor/scripts/verify/run_cargo_test.sh pkg-export-tar @@ -255,6 +411,20 @@ steps: automatic: limit: 1 + - label: "[unit] :linux: aarch64 pkg-export-tar" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - .expeditor/scripts/verify/run_cargo_test-aarch64.sh pkg-export-tar + timeout_in_minutes: 10 + retry: + automatic: + limit: 1 + - label: "[unit] :linux: sup lock_as_rwlock" command: - .expeditor/scripts/verify/run_cargo_test.sh sup --features "lock_as_rwlock" @@ -267,6 +437,20 @@ steps: automatic: limit: 1 + - label: "[unit] :linux: aarch64 sup lock_as_rwlock" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - .expeditor/scripts/verify/run_cargo_test-aarch64.sh sup --features "lock_as_rwlock" + timeout_in_minutes: 40 + retry: + automatic: + limit: 1 + - label: "[unit] :linux: sup lock_as_mutex" command: - .expeditor/scripts/verify/run_cargo_test.sh sup --features "lock_as_mutex" @@ -277,6 +461,18 @@ steps: timeout_in_minutes: 40 soft_fail: true + - label: "[unit] :linux: aarch64 sup lock_as_mutex" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - .expeditor/scripts/verify/run_cargo_test-aarch64.sh sup --features "lock_as_mutex" + timeout_in_minutes: 40 + soft_fail: true + - label: "[unit] :linux: sup-client" command: - .expeditor/scripts/verify/run_cargo_test.sh sup-client @@ -289,6 +485,20 @@ steps: automatic: limit: 1 + - label: "[unit] :linux: aarch64 sup-client" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - .expeditor/scripts/verify/run_cargo_test-aarch64.sh sup-client + timeout_in_minutes: 10 + retry: + automatic: + limit: 1 + - label: "[unit] :linux: sup-protocol" command: - .expeditor/scripts/verify/run_cargo_test.sh sup-protocol @@ -301,6 +511,20 @@ steps: automatic: limit: 1 + - label: "[unit] :linux: aarch64 sup-protocol" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - .expeditor/scripts/verify/run_cargo_test-aarch64.sh sup-protocol + timeout_in_minutes: 10 + retry: + automatic: + limit: 1 + - label: "[unit] :linux: studio" command: - .expeditor/scripts/verify/run_studio_test.sh @@ -624,6 +848,20 @@ steps: automatic: limit: 1 + - label: "[build] :linux: aarch64 test-probe" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + HAB_REFRESH_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - sudo -E .expeditor/scripts/verify/build_package-aarch64.sh test-services/test-probe + retry: + automatic: + limit: 1 + # SHOULD BE UNCOMMENTED WHEN NEWER VERSION OF HAB IS RELEASED # - label: "[build] :linux: backline" @@ -641,6 +879,21 @@ steps: # automatic: # limit: 1 + # This build currently fails due to a circular dependency in 'LTS-2024' and needs investigation. 
+ - label: "[build] :linux: aarch64 backline" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + HAB_REFRESH_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - sudo -E .expeditor/scripts/verify/build_package-aarch64.sh components/backline + retry: + automatic: + limit: 1 + - label: "[build] :linux: hab" env: HAB_LICENSE: "accept-no-persist" @@ -655,6 +908,20 @@ steps: automatic: limit: 1 + - label: "[build] :linux: aarch64 hab" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + HAB_REFRESH_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - sudo -E .expeditor/scripts/verify/build_package-aarch64.sh components/hab + retry: + automatic: + limit: 1 + - label: "[build] :linux: launcher" env: HAB_LICENSE: "accept-no-persist" @@ -669,22 +936,19 @@ steps: automatic: limit: 1 -# SHOULD BE UNCOMMENTED WHEN NEWER VERSION OF HAB IS RELEASED - - # - label: "[build] :linux: pkg-cfize" - # env: - # HAB_LICENSE: "accept-no-persist" - # BUILD_PKG_TARGET: "x86_64-linux" - - # command: - # - .expeditor/scripts/verify/build_package.sh components/pkg-cfize - # expeditor: - # executor: - # docker: - # privileged: true - # retry: - # automatic: - # limit: 1 + - label: "[build] :linux: aarch64 launcher" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + HAB_REFRESH_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - sudo -E .expeditor/scripts/verify/build_package-aarch64.sh components/launcher + retry: + automatic: + limit: 1 - label: "[build] :linux: pkg-export-container" env: @@ -700,6 +964,20 @@ steps: automatic: limit: 1 + - label: "[build] :linux: aarch64 pkg-export-container" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + HAB_REFRESH_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - sudo -E .expeditor/scripts/verify/build_package-aarch64.sh components/pkg-export-container + retry: + automatic: + limit: 1 + - label: "[build] :linux: pkg-export-tar" env: HAB_LICENSE: "accept-no-persist" @@ -714,16 +992,16 @@ steps: automatic: limit: 1 - - label: "[build] :linux: pkg-mesosize" + - label: "[build] :linux: aarch64 pkg-export-tar" + agents: + queue: default-privileged-aarch64 env: HAB_LICENSE: "accept-no-persist" - BUILD_PKG_TARGET: "x86_64-linux" + HAB_BLDR_CHANNEL: "LTS-2024" + HAB_REFRESH_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" command: - - .expeditor/scripts/verify/build_package.sh components/pkg-mesosize - expeditor: - executor: - docker: - privileged: true + - sudo -E .expeditor/scripts/verify/build_package-aarch64.sh components/pkg-export-tar retry: automatic: limit: 1 @@ -742,6 +1020,20 @@ steps: automatic: limit: 1 + - label: "[build] :linux: aarch64 plan-build" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + HAB_REFRESH_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - sudo -E .expeditor/scripts/verify/build_package-aarch64.sh components/plan-build + retry: + automatic: + limit: 1 + - label: "[build] :linux: studio" env: HAB_LICENSE: "accept-no-persist" @@ -756,6 +1048,20 @@ steps: automatic: limit: 1 + - label: "[build] :linux: aarch64 studio" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + 
HAB_BLDR_CHANNEL: "LTS-2024" + HAB_REFRESH_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - sudo -E .expeditor/scripts/verify/build_package-aarch64.sh components/studio + retry: + automatic: + limit: 1 + - label: "[build] :linux: sup" env: HAB_LICENSE: "accept-no-persist" @@ -770,6 +1076,20 @@ steps: automatic: limit: 1 + - label: "[build] :linux: aarch64 sup" + agents: + queue: default-privileged-aarch64 + env: + HAB_LICENSE: "accept-no-persist" + HAB_BLDR_CHANNEL: "LTS-2024" + HAB_REFRESH_CHANNEL: "LTS-2024" + BUILD_PKG_TARGET: "aarch64-linux" + command: + - sudo -E .expeditor/scripts/verify/build_package-aarch64.sh components/sup + retry: + automatic: + limit: 1 + ####################################################################### # Things that have no tests but should be built to make sure they # still build. - Linux Kernel 2 diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 0000000000..07749c3d8a --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,100 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL Advanced" + +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + schedule: + - cron: '31 21 * * 3' + +jobs: + analyze: + name: Analyze (${{ matrix.language }}) + # Runner size impacts CodeQL analysis time. To learn more, please see: + # - https://gh.io/recommended-hardware-resources-for-running-codeql + # - https://gh.io/supported-runners-and-hardware-resources + # - https://gh.io/using-larger-runners (GitHub.com only) + # Consider using larger runners or machines with greater resources for possible analysis time improvements. + runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }} + permissions: + # required for all workflows + security-events: write + + # required to fetch internal or private CodeQL packs + packages: read + + # only required for workflows in private repositories + actions: read + contents: read + + strategy: + fail-fast: false + matrix: + include: + - language: csharp + build-mode: none + - language: go + build-mode: autobuild + - language: javascript-typescript + build-mode: none + - language: ruby + build-mode: none + # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' + # Use `c-cpp` to analyze code written in C, C++ or both + # Use 'java-kotlin' to analyze code written in Java, Kotlin or both + # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both + # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis, + # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning. 
+ # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how + # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + build-mode: ${{ matrix.build-mode }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + + # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + # If the analyze step fails for one of the languages you are analyzing with + # "We were unable to automatically build your code", modify the matrix above + # to set the build mode to "manual" for that language. Then modify this step + # to build your code. + # ℹ️ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + - if: matrix.build-mode == 'manual' + shell: bash + run: | + curl "https://raw.githubusercontent.com/habitat-sh/habitat/master/components/hab/install.sh" | sudo bash + hab license accept + sudo hab license accept + sudo hab pkg install core/protobuf + sudo hab pkg install core/zeromq + export PROTOC=$(hab pkg path core/protobuf)/bin/protoc + export LIBZMQ_PREFIX=$(hab pkg path core/zeromq) + make + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:${{matrix.language}}" diff --git a/.github/workflows/rust-cargo-audit-check.yml b/.github/workflows/rust-cargo-audit-check.yml index e26c4659ba..6f3c11ed80 100644 --- a/.github/workflows/rust-cargo-audit-check.yml +++ b/.github/workflows/rust-cargo-audit-check.yml @@ -7,7 +7,9 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@1.79.0 + - uses: dtolnay/rust-toolchain@1.81.0 + id: toolchain + - run: rustup override set ${{steps.toolchain.outputs.name}} - uses: ClementTsang/cargo-action@v0.0.6 with: command: install @@ -15,6 +17,3 @@ jobs: - uses: ClementTsang/cargo-action@v0.0.6 with: command: audit -# - uses: actions-rs/audit-check@v1 -# with: -# token: ${{ secrets.GITHUB_TOKEN }} diff --git a/BUILDING.md b/BUILDING.md index 26376c14db..654f673c66 100644 --- a/BUILDING.md +++ b/BUILDING.md @@ -171,49 +171,32 @@ These binary overrides can be great for rapid iteration, but will hide errors li ## Testing exporters -Changes to the exporters can be tested once the exporter package has been built locally. For example, to test changes to the Cloud Foundry exporter (`core/hab-pkg-cfize`), first enter the studio and build a new package: -``` +Changes to the exporters can be tested once the exporter package has been built locally. 
For example, to test changes to the container exporter (`core/hab-pkg-export-container`), first enter the studio and build a new package: + +```shell ➤ hab studio enter … -[1][default:/src:0]# build components/pkg-cfize +[1][default:/src:0]# build components/pkg-export-container … - hab-pkg-cfize: Installed Path: /hab/pkgs/jbauman/hab-pkg-cfize/0.56.0-dev/20180410205025 -``` -Now your modifications are installed locally, under `/hab-pkg-cfize`. You can run your new exporter with -``` -[6][default:/src:0]# hab pkg exec $HAB_ORIGIN/hab-pkg-cfize hab-pkg-cfize --help -hab-pkg-export-cf 0.56.0-dev/20180410205025 -… -``` -Note that the version is updated, confirming you're running the new code. The old version is still accessible by running -``` -[10][default:/src:1]# hab pkg export cf --help -hab-pkg-export-cf 0.55.0/20180321215151 + hab-pkg-export-container: Installed Path: /hab/pkgs/jasonheath/hab-pkg-export-container/2.0.17/20250110145427 … ``` -### Testing Cloud Foundry exporter - -See also [Exporting Docker Images for Cloud Foundry with Habitat](https://www.habitat.sh/blog/2018/03/habitat-cloudfoundry/). +Now your modifications are installed locally, under `/hab-pkg-export-container`. You can run your new exporter with -In order to test the Cloud Foundry exporter, you'll need a Cloud Foundry environment which supports -Docker images. If you have one already, you can skip to step 4. +```shell +[6][default:/src:0]# hab pkg exec $HAB_ORIGIN/hab-pkg-export-container hab-pkg-export-container --help +hab-pkg-exec 1.6.1243/20241227194506 +… +``` -*Note:* Make sure to install PCF Dev on your base operating system. It uses VirtualBox as its virtualizer -so you can't install that within a VM. Fortunately, PCF Dev is has support for Linux, Windows and Mac OS. +Note that the version is updated, confirming you're running the new code. The old version is still accessible by running -1. Install [the `cf` CLI](http://docs.pivotal.io/pivotalcf/2-1/cf-cli/install-go-cli.html). -2. Install [VirtualBox 5.0+](https://www.virtualbox.org/). -3. Install [PCF Dev](https://docs.pivotal.io/pcf-dev/index.html). *Note:* running `cf dev start` -for the first time requires a very large download (>7 GB as of this writing). -4. Build a `.hart` file and create a CF mapping `.toml`. See Stark and Wayne's -[Running Habitat Apps on Cloud Foundry](https://starkandwayne.com/blog/habitat-to-cloud-foundry/) and -[Exporting to Cloud Foundry documentation](https://www.habitat.sh/docs/developing-packages/#exporting-to-cloud-foundry) -for more details. -5. Run the exporter to add the `cf-` tagged image to Docker. -6. `docker push …` the image to Docker Hub. -7. `cf push …` the image from Docker Hub to add to the CF instance. *Note:* with PCF Dev this command -must be run from the source directory of the project to correctly identify the necessary buildpack. 
+``` shell +[10][default:/src:1]# hab pkg export container --help +hab-pkg-export-container 1.6.1243/20241227202254 +… +``` ## HAB_STUDIO_BINARY diff --git a/CHANGELOG.md b/CHANGELOG.md index dd9497f352..f334576ca7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,16 +1,94 @@ # Habitat CHANGELOG - -## [1.6.1208](https://github.com/habitat-sh/habitat/tree/1.6.1208) (2024-11-08) + +## [2.0.26](https://github.com/habitat-sh/habitat/tree/2.0.26) (2025-01-14) #### Merged Pull Requests -- Bump libc from 0.2.161 to 0.2.162 [#9464](https://github.com/habitat-sh/habitat/pull/9464) ([dependabot[bot]](https://github.com/dependabot[bot])) +- clap-v4 support for `hab` cli [#9330](https://github.com/habitat-sh/habitat/pull/9330) ([agadgil-progress](https://github.com/agadgil-progress)) - -### Changes since 1.6.1041 release + +### Changes since 1.6.1215 release #### Merged Pull Requests +- clap-v4 support for `hab` cli [#9330](https://github.com/habitat-sh/habitat/pull/9330) ([agadgil-progress](https://github.com/agadgil-progress)) +- pass release channel to HAB_STUDIO_SECRET_HAB_REFRESH_CHANNEL in windows release pipeline [#9566](https://github.com/habitat-sh/habitat/pull/9566) ([mwrock](https://github.com/mwrock)) +- Bump cc from 1.2.7 to 1.2.9 [#9563](https://github.com/habitat-sh/habitat/pull/9563) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump notify from 7.0.0 to 8.0.0 [#9564](https://github.com/habitat-sh/habitat/pull/9564) ([dependabot[bot]](https://github.com/dependabot[bot])) +- powershell workaround for hab studio rm with powershell 7.5 and older windows os [#9561](https://github.com/habitat-sh/habitat/pull/9561) ([mwrock](https://github.com/mwrock)) +- Removes mesosize and cfize [#9557](https://github.com/habitat-sh/habitat/pull/9557) ([jasonheath](https://github.com/jasonheath)) +- Bump bitflags from 2.6.0 to 2.7.0 [#9560](https://github.com/habitat-sh/habitat/pull/9560) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump tokio from 1.42.0 to 1.43.0 [#9559](https://github.com/habitat-sh/habitat/pull/9559) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Rustfmt and Nightly Rust Bump to nightly-2025-01-09 [#9555](https://github.com/habitat-sh/habitat/pull/9555) ([chef-expeditor[bot]](https://github.com/chef-expeditor[bot])) +- set refresh-channel to LTS-2024 in verify [#9556](https://github.com/habitat-sh/habitat/pull/9556) ([mwrock](https://github.com/mwrock)) +- Bump petgraph from 0.7.0 to 0.7.1 [#9553](https://github.com/habitat-sh/habitat/pull/9553) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump winreg from 0.52.0 to 0.53.0 [#9554](https://github.com/habitat-sh/habitat/pull/9554) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump serde_json from 1.0.134 to 1.0.135 [#9550](https://github.com/habitat-sh/habitat/pull/9550) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump async-trait from 0.1.84 to 0.1.85 [#9551](https://github.com/habitat-sh/habitat/pull/9551) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump pin-project from 1.1.7 to 1.1.8 [#9549](https://github.com/habitat-sh/habitat/pull/9549) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump async-trait from 0.1.83 to 0.1.84 
[#9543](https://github.com/habitat-sh/habitat/pull/9543) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump tabwriter from 1.4.0 to 1.4.1 [#9545](https://github.com/habitat-sh/habitat/pull/9545) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump os_info from 3.9.1 to 3.9.2 [#9544](https://github.com/habitat-sh/habitat/pull/9544) ([dependabot[bot]](https://github.com/dependabot[bot])) +- release uploads aarch64 from x64 docker [#9546](https://github.com/habitat-sh/habitat/pull/9546) ([mwrock](https://github.com/mwrock)) +- bump powershell to 7.5.0 [#9542](https://github.com/habitat-sh/habitat/pull/9542) ([mwrock](https://github.com/mwrock)) +- Bump tempfile from 3.14.0 to 3.15.0 [#9540](https://github.com/habitat-sh/habitat/pull/9540) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump cc from 1.2.6 to 1.2.7 [#9539](https://github.com/habitat-sh/habitat/pull/9539) ([dependabot[bot]](https://github.com/dependabot[bot])) +- remove expeditor staging area [#9537](https://github.com/habitat-sh/habitat/pull/9537) ([mwrock](https://github.com/mwrock)) +- testing some expeditor stuff [#9535](https://github.com/habitat-sh/habitat/pull/9535) ([mwrock](https://github.com/mwrock)) +- turn off cpp codeql [#9526](https://github.com/habitat-sh/habitat/pull/9526) ([mwrock](https://github.com/mwrock)) +- fix for yanked petgraph [#9531](https://github.com/habitat-sh/habitat/pull/9531) ([mwrock](https://github.com/mwrock)) +- Bump petgraph from 0.6.5 to 0.6.6 [#9529](https://github.com/habitat-sh/habitat/pull/9529) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump serde from 1.0.216 to 1.0.217 [#9528](https://github.com/habitat-sh/habitat/pull/9528) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump glob from 0.3.1 to 0.3.2 [#9527](https://github.com/habitat-sh/habitat/pull/9527) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump cc from 1.2.5 to 1.2.6 [#9523](https://github.com/habitat-sh/habitat/pull/9523) ([dependabot[bot]](https://github.com/dependabot[bot])) +- start v2 and add release branches in expeditor config [#9521](https://github.com/habitat-sh/habitat/pull/9521) ([mwrock](https://github.com/mwrock)) +- refresh channel should only include core packages [#9510](https://github.com/habitat-sh/habitat/pull/9510) ([mwrock](https://github.com/mwrock)) +- Bump anyhow from 1.0.94 to 1.0.95 [#9519](https://github.com/habitat-sh/habitat/pull/9519) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump env_logger from 0.11.5 to 0.11.6 [#9518](https://github.com/habitat-sh/habitat/pull/9518) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump serde_json from 1.0.133 to 1.0.134 [#9515](https://github.com/habitat-sh/habitat/pull/9515) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump rcgen from 0.13.1 to 0.13.2 [#9517](https://github.com/habitat-sh/habitat/pull/9517) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump os_info from 3.9.0 to 3.9.1 [#9516](https://github.com/habitat-sh/habitat/pull/9516) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump libc from 0.2.168 to 0.2.169 
[#9512](https://github.com/habitat-sh/habitat/pull/9512) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump cc from 1.2.4 to 1.2.5 [#9511](https://github.com/habitat-sh/habitat/pull/9511) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump hyper from 1.5.1 to 1.5.2 [#9506](https://github.com/habitat-sh/habitat/pull/9506) ([dependabot[bot]](https://github.com/dependabot[bot])) +- remove commented code from audit action [#9508](https://github.com/habitat-sh/habitat/pull/9508) ([mwrock](https://github.com/mwrock)) +- use rust 1.81 in cargo audit [#9507](https://github.com/habitat-sh/habitat/pull/9507) ([mwrock](https://github.com/mwrock)) +- fixes for release pipeline (aarch64) [#9481](https://github.com/habitat-sh/habitat/pull/9481) ([sajjaphani](https://github.com/sajjaphani)) +- updating release doc to remove linux kernel2 [#9482](https://github.com/habitat-sh/habitat/pull/9482) ([mwrock](https://github.com/mwrock)) +- Bump cc from 1.2.3 to 1.2.4 [#9505](https://github.com/habitat-sh/habitat/pull/9505) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump semver from 1.0.23 to 1.0.24 [#9503](https://github.com/habitat-sh/habitat/pull/9503) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump chrono from 0.4.38 to 0.4.39 [#9501](https://github.com/habitat-sh/habitat/pull/9501) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump libc from 0.2.167 to 0.2.168 [#9500](https://github.com/habitat-sh/habitat/pull/9500) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump prost-build from 0.13.3 to 0.13.4 [#9498](https://github.com/habitat-sh/habitat/pull/9498) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump cc from 1.2.2 to 1.2.3 [#9497](https://github.com/habitat-sh/habitat/pull/9497) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump cc from 1.2.1 to 1.2.2 [#9491](https://github.com/habitat-sh/habitat/pull/9491) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump libc from 0.2.164 to 0.2.167 [#9488](https://github.com/habitat-sh/habitat/pull/9488) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump errno from 0.3.9 to 0.3.10 [#9486](https://github.com/habitat-sh/habitat/pull/9486) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump os_info from 3.8.2 to 3.9.0 [#9490](https://github.com/habitat-sh/habitat/pull/9490) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump tokio from 1.41.1 to 1.42.0 [#9489](https://github.com/habitat-sh/habitat/pull/9489) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump bytes from 1.8.0 to 1.9.0 [#9487](https://github.com/habitat-sh/habitat/pull/9487) ([dependabot[bot]](https://github.com/dependabot[bot])) +- bump url from yanked 2.5.3 to 2.5.4 [#9483](https://github.com/habitat-sh/habitat/pull/9483) ([mwrock](https://github.com/mwrock)) +- Rustfmt and Nightly Rust Bump to nightly-2024-11-18 [#9478](https://github.com/habitat-sh/habitat/pull/9478) ([chef-expeditor[bot]](https://github.com/chef-expeditor[bot])) +- Bump hyper from 1.5.0 to 1.5.1 [#9480](https://github.com/habitat-sh/habitat/pull/9480) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump 
serde_json from 1.0.132 to 1.0.133 [#9477](https://github.com/habitat-sh/habitat/pull/9477) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump libc from 0.2.162 to 0.2.164 [#9476](https://github.com/habitat-sh/habitat/pull/9476) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Pipeline setup for aarch64-linux [#9326](https://github.com/habitat-sh/habitat/pull/9326) ([sajjaphani](https://github.com/sajjaphani)) +- update environment variable doc to reflect HAB_REFRESH_CHANNEL [#9475](https://github.com/habitat-sh/habitat/pull/9475) ([mwrock](https://github.com/mwrock)) +- update changelog for hab 1.6.1215 [#9479](https://github.com/habitat-sh/habitat/pull/9479) ([mwrock](https://github.com/mwrock)) + + + + +## [1.6.1215](https://github.com/habitat-sh/habitat/tree/1.6.1215) (2024-11-15) + +#### Merged Pull Requests +- Bump cc from 1.2.0 to 1.2.1 [#9473](https://github.com/habitat-sh/habitat/pull/9473) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump flate2 from 1.0.34 to 1.0.35 [#9474](https://github.com/habitat-sh/habitat/pull/9474) ([dependabot[bot]](https://github.com/dependabot[bot])) +- ignore derivative and instant unmaintained crates in audits [#9472](https://github.com/habitat-sh/habitat/pull/9472) ([mwrock](https://github.com/mwrock)) +- Bump cc from 1.1.37 to 1.2.0 [#9471](https://github.com/habitat-sh/habitat/pull/9471) ([dependabot[bot]](https://github.com/dependabot[bot])) +- fix dep resolution with HAB_STUDIO_INSTALL_PKGS [#9467](https://github.com/habitat-sh/habitat/pull/9467) ([sajjaphani](https://github.com/sajjaphani)) +- Bump serde from 1.0.214 to 1.0.215 [#9468](https://github.com/habitat-sh/habitat/pull/9468) ([dependabot[bot]](https://github.com/dependabot[bot])) +- Bump cc from 1.1.36 to 1.1.37 [#9466](https://github.com/habitat-sh/habitat/pull/9466) ([dependabot[bot]](https://github.com/dependabot[bot])) - Bump libc from 0.2.161 to 0.2.162 [#9464](https://github.com/habitat-sh/habitat/pull/9464) ([dependabot[bot]](https://github.com/dependabot[bot])) - Bump tokio from 1.41.0 to 1.41.1 [#9463](https://github.com/habitat-sh/habitat/pull/9463) ([dependabot[bot]](https://github.com/dependabot[bot])) - Bump tempfile from 3.13.0 to 3.14.0 [#9462](https://github.com/habitat-sh/habitat/pull/9462) ([dependabot[bot]](https://github.com/dependabot[bot])) @@ -217,9 +295,6 @@ - Bump chrono from 0.4.37 to 0.4.38 [#9222](https://github.com/habitat-sh/habitat/pull/9222) ([dependabot[bot]](https://github.com/dependabot[bot])) - Bump hyper from 1.2.0 to 1.3.1 [#9225](https://github.com/habitat-sh/habitat/pull/9225) ([dependabot[bot]](https://github.com/dependabot[bot])) - update changelog for 1.6.1041 release [#9229](https://github.com/habitat-sh/habitat/pull/9229) ([mwrock](https://github.com/mwrock)) - - - ## [1.6.1041](https://github.com/habitat-sh/habitat/tree/1.6.1041) (2024-04-10) diff --git a/Cargo.lock b/Cargo.lock index c04b9f2cf1..6eadca521a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8,7 +8,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f7b0a21988c1bf877cf4759ef5ddaac04c1c9fe808c9142ecb78ba97d97a28a" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", "bytes", 
"futures-core", "futures-sink", @@ -32,7 +32,7 @@ dependencies = [ "actix-utils", "ahash", "base64 0.22.1", - "bitflags 2.6.0", + "bitflags 2.7.0", "bytes", "bytestring", "derive_more", @@ -324,9 +324,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.93" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" +checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" dependencies = [ "backtrace", ] @@ -351,9 +351,9 @@ checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" [[package]] name = "async-trait" -version = "0.1.83" +version = "0.1.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" +checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056" dependencies = [ "proc-macro2", "quote", @@ -454,9 +454,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +checksum = "1be3f42a67d6d345ecd59f675f3f012d6974981560836e938c22b424b85ce1be" [[package]] name = "blake2b_simd" @@ -507,9 +507,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" [[package]] name = "bytestring" @@ -532,9 +532,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.1.36" +version = "1.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baee610e9452a8f6f0a1b6194ec09ff9e2d85dea54432acdae41aa0761c95d70" +checksum = "c8293772165d9345bdaaa39b45b2109591e63fe5e6fbc23c6ff930a048aa310b" dependencies = [ "shlex", ] @@ -553,9 +553,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" dependencies = [ "android-tzdata", "iana-time-zone", @@ -576,7 +576,7 @@ dependencies = [ "bitflags 1.3.2", "strsim 0.8.0", "textwrap", - "unicode-width", + "unicode-width 0.1.13", "vec_map", ] @@ -924,9 +924,9 @@ dependencies = [ [[package]] name = "env_logger" -version = "0.11.5" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13fa619b91fb2381732789fc5de83b45675e882f66623b7d8cb4f643017018d" +checksum = "dcaee3d8e3cfc3fd92428d477bc97fc29ec8716d180c0d74c643bb26166660e0" dependencies = [ "anstream", "anstyle", @@ -962,12 +962,12 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - 
"windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1020,11 +1020,17 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" +[[package]] +name = "fixedbitset" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" + [[package]] name = "flate2" -version = "1.0.34" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0" +checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" dependencies = [ "crc32fast", "miniz_oxide 0.8.0", @@ -1210,9 +1216,9 @@ checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" [[package]] name = "glob" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "h2" @@ -1258,9 +1264,10 @@ version = "0.0.0" dependencies = [ "anyhow", "base64 0.22.1", - "bitflags 2.6.0", + "bitflags 2.7.0", "chrono", "clap 2.33.1", + "clap 4.5.9", "configopt", "ctrlc", "dirs", @@ -1300,7 +1307,7 @@ dependencies = [ "walkdir", "widestring 1.1.0", "winapi", - "winreg", + "winreg 0.53.0", ] [[package]] @@ -1451,7 +1458,7 @@ version = "0.0.0" dependencies = [ "async-trait", "bimap", - "bitflags 2.6.0", + "bitflags 2.7.0", "clap 2.33.1", "clap 4.5.9", "dirs", @@ -1466,7 +1473,7 @@ dependencies = [ "nix", "parking_lot", "pbr", - "petgraph", + "petgraph 0.7.1", "regex 1.11.1", "reqwest", "retry", @@ -1628,12 +1635,12 @@ dependencies = [ "habitat_common", "habitat_core", "habitat_http_client", - "hyper 1.5.0", + "hyper 1.5.2", "lazy_static 1.5.0", "libc", "log 0.4.22", "log4rs", - "mio 1.0.2", + "mio 1.0.3", "multimap", "nix", "notify", @@ -1862,9 +1869,9 @@ dependencies = [ [[package]] name = "hyper" -version = "1.5.0" +version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbbff0a806a4728c99295b254c8838933b5b082d75e3cb70c8dab21fdfbcfa9a" +checksum = "256fb8d4bd6413123cc9d91832d78325c48ff41677595be797d90f42969beae0" dependencies = [ "bytes", "futures-channel", @@ -1901,7 +1908,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper 1.5.0", + "hyper 1.5.2", "hyper-util", "native-tls", "tokio", @@ -1920,7 +1927,7 @@ dependencies = [ "futures-util", "http 1.1.0", "http-body 1.0.0", - "hyper 1.5.0", + "hyper 1.5.2", "pin-project-lite", "socket2", "tokio", @@ -2099,9 +2106,9 @@ checksum = "206ca75c9c03ba3d4ace2460e57b189f39f43de612c2f85836e65c929701bb2d" [[package]] name = "indexmap" -version = "2.2.6" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5" dependencies = [ "equivalent", "hashbrown", @@ -2109,11 +2116,11 @@ dependencies = [ [[package]] name = "inotify" -version = "0.10.2" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdd168d97690d0b8c412d6b6c10360277f4d7ee495c5d0d5d5fe0854923255cc" 
+checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.7.0", "inotify-sys", "libc", ] @@ -2127,15 +2134,6 @@ dependencies = [ "libc", ] -[[package]] -name = "instant" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" -dependencies = [ - "cfg-if", -] - [[package]] name = "ipc-channel" version = "0.18.1" @@ -2250,9 +2248,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.162" +version = "0.2.169" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18d287de67fe55fd7e1581fe933d965a5a9477b38e949cfa9f8574ef01506398" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" [[package]] name = "libredox" @@ -2260,7 +2258,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", "libc", ] @@ -2489,11 +2487,10 @@ dependencies = [ [[package]] name = "mio" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ - "hermit-abi 0.3.9", "libc", "log 0.4.22", "wasi", @@ -2541,7 +2538,7 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", "cfg-if", "cfg_aliases", "libc", @@ -2559,31 +2556,28 @@ dependencies = [ [[package]] name = "notify" -version = "7.0.0" +version = "8.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c533b4c39709f9ba5005d8002048266593c1cfaf3c5f0739d5b8ab0c6c504009" +checksum = "2fee8403b3d66ac7b26aee6e40a897d85dc5ce26f44da36b8b73e987cc52e943" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", "filetime", "fsevent-sys", "inotify", "kqueue", "libc", "log 0.4.22", - "mio 1.0.2", + "mio 1.0.3", "notify-types", "walkdir", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "notify-types" -version = "1.0.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7393c226621f817964ffb3dc5704f9509e107a8b024b489cc2c1b217378785df" -dependencies = [ - "instant", -] +checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d" [[package]] name = "nu-ansi-term" @@ -2647,7 +2641,7 @@ version = "0.10.66" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", "cfg-if", "foreign-types", "libc", @@ -2712,9 +2706,9 @@ dependencies = [ [[package]] name = "os_info" -version = "3.8.2" +version = "3.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae99c7fa6dd38c7cafe1ec085e804f8f555a2f8659b0dbe03f1f9963a9b51092" +checksum = "6e6520c8cc998c5741ee68ec1dc369fc47e5f0ea5320018ecf2a1ccd6328f48b" dependencies = [ "log 0.4.22", "serde", @@ -2746,7 +2740,7 @@ dependencies = [ "backtrace", "cfg-if", "libc", - 
"petgraph", + "petgraph 0.6.5", "redox_syscall 0.5.2", "smallvec", "thread-id", @@ -2798,7 +2792,17 @@ version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ - "fixedbitset", + "fixedbitset 0.4.2", + "indexmap", +] + +[[package]] +name = "petgraph" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" +dependencies = [ + "fixedbitset 0.5.7", "indexmap", ] @@ -2842,18 +2846,18 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" +checksum = "1e2ec53ad785f4d35dac0adea7f7dc6f1bb277ad84a680c7afefeae05d1f5916" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" +checksum = "d56a66c0c55993aa927429d0f8a0abfd74f084e4d9c192cffed01e418d83eefb" dependencies = [ "proc-macro2", "quote", @@ -2961,9 +2965,9 @@ dependencies = [ [[package]] name = "prost" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b0487d90e047de87f984913713b85c601c05609aad5b0df4b4573fbf69aa13f" +checksum = "2c0fef6c4230e4ccf618a35c59d7ede15dea37de8427500f50aff708806e42ec" dependencies = [ "bytes", "prost-derive", @@ -2971,17 +2975,16 @@ dependencies = [ [[package]] name = "prost-build" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c1318b19085f08681016926435853bbf7858f9c082d0999b80550ff5d9abe15" +checksum = "d0f3e5beed80eb580c68e2c600937ac2c4eedabdfd5ef1e5b7ea4f3fba84497b" dependencies = [ - "bytes", "heck 0.5.0", "itertools", "log 0.4.22", "multimap", "once_cell", - "petgraph", + "petgraph 0.6.5", "prettyplease", "prost", "prost-types", @@ -2992,9 +2995,9 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9552f850d5f0964a4e4d0bf306459ac29323ddfbae05e35a7c0d35cb0803cc5" +checksum = "157c5a9d7ea5c2ed2d9fb8f495b64759f7816c7eaea54ba3978f0d63000162e3" dependencies = [ "anyhow", "itertools", @@ -3005,9 +3008,9 @@ dependencies = [ [[package]] name = "prost-types" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4759aa0d3a6232fb8dbdb97b61de2c20047c68aca932c7ed76da9d788508d670" +checksum = "cc2f1e56baa61e93533aebc21af4d2134b70f66275e0fcdf3cbe43d77ff7e8fc" dependencies = [ "prost", ] @@ -3102,9 +3105,9 @@ dependencies = [ [[package]] name = "rcgen" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54077e1872c46788540de1ea3d7f4ccb1983d12f9aa909b234468676c1a36779" +checksum = "75e669e5202259b5314d1ea5397316ad400819437857b90861765f24c4cf80a2" dependencies = [ "pem", "ring", @@ -3128,7 +3131,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c82cf8cff14456045f55ec4241383baeff27af886adb72ffb2162f99911de0fd" 
dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", ] [[package]] @@ -3230,7 +3233,7 @@ dependencies = [ "http 1.1.0", "http-body 1.0.0", "http-body-util", - "hyper 1.5.0", + "hyper 1.5.2", "hyper-tls 0.6.0", "hyper-util", "ipnet", @@ -3256,7 +3259,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "winreg", + "winreg 0.52.0", ] [[package]] @@ -3387,7 +3390,7 @@ version = "0.38.39" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "375116bee2be9ed569afe2154ea6a99dfdffd257f533f187498c2a8f5feaf4ee" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", "errno", "libc", "linux-raw-sys", @@ -3509,7 +3512,7 @@ version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", "core-foundation", "core-foundation-sys", "libc", @@ -3528,15 +3531,15 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.23" +version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" +checksum = "3cb6eb87a131f756572d7fb904f6e7b68633f09cca868c5df1c4b8d1a694bbba" [[package]] name = "serde" -version = "1.0.214" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f55c3193aca71c12ad7890f1785d2b73e1b9f63a0bbc353c08ef26fe03fc56b5" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" dependencies = [ "serde_derive", ] @@ -3562,9 +3565,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.214" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766" +checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" dependencies = [ "proc-macro2", "quote", @@ -3573,9 +3576,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.132" +version = "1.0.135" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" +checksum = "2b0d7ba2887406110130a978386c4e1befb98c674b4fba677954e4db976630d9" dependencies = [ "indexmap", "itoa", @@ -3847,11 +3850,11 @@ dependencies = [ [[package]] name = "tabwriter" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a327282c4f64f6dc37e3bba4c2b6842cc3a992f204fa58d917696a89f691e5f6" +checksum = "fce91f2f0ec87dff7e6bcbbeb267439aa1188703003c6055193c821487400432" dependencies = [ - "unicode-width", + "unicode-width 0.2.0", ] [[package]] @@ -3873,12 +3876,13 @@ checksum = "37c12559dba7383625faaff75be24becf35bfc885044375bcab931111799a3da" [[package]] name = "tempfile" -version = "3.14.0" +version = "3.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" +checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704" dependencies = [ "cfg-if", "fastrand", + "getrandom", "once_cell", "rustix", "windows-sys 0.59.0", @@ -3921,7 +3925,7 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" dependencies = [ - "unicode-width", + 
"unicode-width 0.1.13", ] [[package]] @@ -4025,14 +4029,14 @@ dependencies = [ [[package]] name = "tokio" -version = "1.41.1" +version = "1.43.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33" +checksum = "3d61fa4ffa3de412bfea335c6ecff681de2b609ba3c77ef3e00e521813a9ed9e" dependencies = [ "backtrace", "bytes", "libc", - "mio 1.0.2", + "mio 1.0.3", "parking_lot", "pin-project-lite", "signal-hook-registry", @@ -4043,9 +4047,9 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", @@ -4086,9 +4090,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.12" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" +checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" dependencies = [ "bytes", "futures-core", @@ -4301,6 +4305,12 @@ version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" +[[package]] +name = "unicode-width" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" + [[package]] name = "unsafe-any-ors" version = "1.0.0" @@ -4333,9 +4343,9 @@ dependencies = [ [[package]] name = "url" -version = "2.5.3" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d157f1b96d14500ffdc1f10ba712e780825526c03d9a49b4d0324b0d9113ada" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", @@ -4793,6 +4803,16 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "winreg" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89a47b489f8fc5b949477e89dca4d1617f162c6c53fbcbefde553ab17b342ff9" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + [[package]] name = "write16" version = "1.0.0" diff --git a/END_TO_END_TESTING.md b/END_TO_END_TESTING.md index 3e1b7020aa..ee776ba647 100644 --- a/END_TO_END_TESTING.md +++ b/END_TO_END_TESTING.md @@ -35,7 +35,7 @@ packages for `stable` below in `HAB_BLDR_CHANNEL` if that is required. 
``` env HAB_BLDR_CHANNEL=stable HAB_ORIGIN=core \ -hab studio run "for component in hab plan-build backline studio launcher sup pkg-export-tar pkg-export-docker pkg-mesosize pkg-cfize; do build components/\$component; done" +hab studio run "for component in hab plan-build backline studio launcher sup pkg-export-tar pkg-export-docker; do build components/\$component; done" ###################################################################### # Before uploading, ensure only your intended hart files are present # ###################################################################### diff --git a/POWERSHELL_VERSION b/POWERSHELL_VERSION index c817c72f89..54c0eff7a9 100644 --- a/POWERSHELL_VERSION +++ b/POWERSHELL_VERSION @@ -1 +1 @@ -7.1.3/20210706193415 +7.5.0 diff --git a/RELEASE.md b/RELEASE.md index aee4224f17..d1612754ae 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -27,7 +27,6 @@ new packages coming in and invalidating your efforts. For each platform ([darwin](https://packages.chef.io/files/staging/habitat/latest/hab-x86_64-darwin.zip), [linux](https://packages.chef.io/files/staging/habitat/latest/hab-x86_64-linux.tar.gz), -[linux-kernel2](https://packages.chef.io/files/staging/habitat/latest/hab-x86_64-linux-kernel2.tar.gz), [windows](https://packages.chef.io/files/staging/habitat/latest/hab-x86_64-windows.zip)), download the latest release candidate CLI from `packages.chef.io`. You **must** have run the `/expeditor` Slack command above _before_ @@ -53,21 +52,6 @@ curl https://raw.githubusercontent.com/habitat-sh/habitat/master/components/hab/ sudo hab pkg install core/hab --binlink --force --channel=staging ``` -#### Linux, Kernel 2 - -Run either of the following commands: - -``` sh -curl https://raw.githubusercontent.com/habitat-sh/habitat/master/components/hab/install.sh \ - | sudo bash -s -- -c staging -t x86_64-linux-kernel2 -``` -(Note the addition of the target option on the "curlbash" command! Without this you will end up with -the modern Linux version, and things won't work properly!) - -```sh -sudo hab pkg install core/hab --binlink --force --channel=staging -``` - #### macOS ``` sh @@ -120,7 +104,9 @@ Here is how you can validate the version of the cli, supervisor and the supervis On Linux: ``` sh hab --version -hab sup --version +sudo -E hab sup --version + +hab studio rm hab studio enter hab --version sup-log @@ -133,6 +119,7 @@ On Windows: ``` pwsh hab --version hab sup --version +hab studio rm hab studio enter hab --version Get-Supervisorlog @@ -158,81 +145,50 @@ hab studio enter ``` Then, once inside the Studio, you could try these: +On Linux: + ``` sh # Does the version of the cli inside the studio match staging? hab --version # Does the version of the supervisor inside the studio match staging? sup-log ^C -# build the apache plan -build core-plans/httpd +# build the redis plan +build core-plans/redis source results/last_build.env hab svc load $pkg_ident sup-log -# Is Apache running +# Is redis running and accepting connections ^C +# Is it connectable? +hab pkg exec $pkg_ident redis-cli --stat +``` + +On Windows: +``` sh +# Does the version of the cli inside the studio match staging? +hab --version +# Does the version of the supervisor inside the studio match staging? +# The supervisor log will come up in a different window in a non-docker +# Windows Studio. Make sure to "accept" any windows firewall requests. +Get-SupervisorLog +# build the nginx plan +build core-plans/nginx +. 
results/last_build.ps1 +hab svc load $pkg_ident +# Look at the log window to see if nginx is running # Is it responding? -hab pkg exec core/busybox-static wget http://localhost -S +Invoke-WebRequest http://localhost ``` -On Linux, testing the Docker studio is identical, except you enter +Testing the Docker studio is identical, except you enter using the following command instead: ```sh hab studio enter -D ``` -Test both in the chroot and docker studios on x86 linux. - -### Validating x86_64-linux-kernel2 - -For this PackageTarget it is important that you perform validation on a Linux system running a 2.6 series kernel. CentOS 6 is recommended because it ships with a kernel of the appropriate age, but any distro with a Kernel between 2.6.32 and 3.0.0 can be used. Included in the `support/validation/x86_64-linux-kernel2` directory in this repository is a Vagrantfile that will create a CentOS-6 VM to perform the validation. You can also run a VM in EC2. - -The Vagrantfile is configured to grab the -[core-plans](https://github.com/habitat-sh/core-plans) repository (to -give you something to build), as well as grab the secret key for your -`HAB_ORIGIN` (using the `HAB_ORIGIN` and `HAB_AUTH_TOKEN` variables in -your environment). Additionally, it will automatically install the -release candidate `hab` binary from the `staging` channel unless you -explicitly override that with the `INSTALL_CHANNEL` variable (see below). - - -```sh -export HAB_ORIGIN=... -export HAB_AUTH_TOKEN=... - -# Only if you *don't* want the staging artifact, for some reason -export INSTALL_CHANNEL=... - -vagrant up -vagrant ssh -``` -Once inside the VM, set your override environment variables (as above) -and experiment. For example: - -```sh -export HAB_INTERNAL_BLDR_CHANNEL=staging -export HAB_STUDIO_SECRET_HAB_INTERNAL_BLDR_CHANNEL=staging -export HAB_ORIGIN= -hab studio enter -``` - -Then, once inside the Studio, you could try these: - -``` sh -# Does the version of the cli inside the studio match staging? -hab --version -# Does the version of the supervisor inside the studio match staging? -sup-log -^C -# build the apache plan -build core-plans/redis -source results/last_build.env -hab svc load $pkg_ident -sup-log -# Is Redis running and reporting "Ready to accept connections" -^C -``` +Test both in the native and Docker studios on x86 Linux and Windows. ## Promote from Staging to Current diff --git a/RUSTFMT_VERSION b/RUSTFMT_VERSION index 073051caf9..77389bffe2 100644 --- a/RUSTFMT_VERSION +++ b/RUSTFMT_VERSION @@ -1 +1 @@ -nightly-2024-04-22 +nightly-2025-01-09 diff --git a/RUST_NIGHTLY_VERSION b/RUST_NIGHTLY_VERSION index 073051caf9..77389bffe2 100644 --- a/RUST_NIGHTLY_VERSION +++ b/RUST_NIGHTLY_VERSION @@ -1 +1 @@ -nightly-2024-04-22 +nightly-2025-01-09 diff --git a/VERSION b/VERSION index 71fae851e6..46d9844b78 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.6.1208 \ No newline at end of file +2.0.26 \ No newline at end of file diff --git a/components/common/src/cli/clap_validators.rs b/components/common/src/cli/clap_validators.rs index fa4e1eed30..fc2c0da008 100644 --- a/components/common/src/cli/clap_validators.rs +++ b/components/common/src/cli/clap_validators.rs @@ -70,3 +70,248 @@ impl clap_v4::builder::TypedValueParser for HabPackageInstallSourceValueParser { } } } + +/// Struct implementing validator for Habitat Origin +/// +/// Validates with `habitat_core::origin::Origin::validate` function.
+#[derive(Clone)] +pub struct HabOriginValueParser; + +impl clap_v4::builder::TypedValueParser for HabOriginValueParser { + type Value = String; + + fn parse_ref(&self, + cmd: &clap_v4::Command, + arg: Option<&clap_v4::Arg>, + value: &std::ffi::OsStr) + -> Result<Self::Value, clap_v4::Error> { + let val = value.to_str().unwrap().to_string(); + + let result = habitat_core::origin::Origin::validate(val); + if result.is_err() { + let mut err = + clap_v4::Error::new(clap_v4::error::ErrorKind::ValueValidation).with_cmd(cmd); + if let Some(arg) = arg { + err.insert(clap_v4::error::ContextKind::InvalidArg, + clap_v4::error::ContextValue::String(arg.to_string())); + } + err.insert(clap_v4::error::ContextKind::InvalidValue, + clap_v4::error::ContextValue::String(format!("`{}`: {}", + value.to_string_lossy(), + result.err().unwrap(),))); + Err(err) + } else { + Ok(value.to_str().unwrap().to_string()) + } + } +} + +/// Struct implementing validator that validates the value is a valid path +#[derive(Clone)] +pub struct FileExistsValueParser; + +impl clap_v4::builder::TypedValueParser for FileExistsValueParser { + type Value = String; + + fn parse_ref(&self, + cmd: &clap_v4::Command, + arg: Option<&clap_v4::Arg>, + value: &std::ffi::OsStr) + -> Result<Self::Value, clap_v4::Error> { + parse_ref_internal(cmd, arg, value, false, false, "is not a valid file") + } +} + +// TODO: This will be used by `hab config` (this implements the functionality of the +// `file_exists_or_stdin` validator in Clap v2). +/// Struct implementing validator that validates the value is a valid 'file' or 'stdin' +#[derive(Clone)] +pub struct FileExistsOrStdinValueParser; + +impl clap_v4::builder::TypedValueParser for FileExistsOrStdinValueParser { + type Value = String; + + fn parse_ref(&self, + cmd: &clap_v4::Command, + arg: Option<&clap_v4::Arg>, + value: &std::ffi::OsStr) + -> Result<Self::Value, clap_v4::Error> { + parse_ref_internal(cmd, arg, value, false, true, "is not a valid file or stdin") + } +} + +/// Struct implementing validator that validates the value is a valid directory +/// +/// Internally uses `ValidPathValueParser` +#[derive(Clone)] +pub struct DirExistsValueParser; + +impl clap_v4::builder::TypedValueParser for DirExistsValueParser { + type Value = std::path::PathBuf; + + fn parse_ref(&self, + cmd: &clap_v4::Command, + arg: Option<&clap_v4::Arg>, + value: &std::ffi::OsStr) + -> Result<Self::Value, clap_v4::Error> { + parse_ref_internal(cmd, arg, value, true, false, "is not a valid directory").map(|x| { + x.into() + }) + } +} + +// Checks whether a given path is a file, a directory, or stdin; used internally by the validators. +// +// e.g. FileExistsValueParser will call this function with both `check_dir` and `check_stdin` set to +// false. DirExistsValueParser will call this function with `check_dir` set to `true` and +// `check_stdin` set to `false`, etc.
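+//
+// A minimal usage sketch (hypothetical, illustrative only; not part of this change). In a
+// hand-rolled clap v4 command, one of the validators above could be attached to an argument;
+// clap converts a custom `TypedValueParser` into a `ValueParser` automatically. The argument
+// name "CONFIG_DIR" is an assumption for the example.
+//
+// let cmd = clap_v4::Command::new("example")
+//     .arg(clap_v4::Arg::new("CONFIG_DIR").value_parser(DirExistsValueParser));
+// let matches = cmd.get_matches_from(["example", "/tmp"]);
+// let dir: Option<&std::path::PathBuf> = matches.get_one("CONFIG_DIR");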
+fn check_valid_file_dir_stdin(path: &std::path::Path, check_dir: bool, check_stdin: bool) -> bool { + let mut is_valid = path.is_file(); + + if !is_valid && check_dir { + is_valid = path.is_dir(); + } + + if !is_valid && check_stdin { + // By convention, "-" stands for "read from stdin" + is_valid = path.to_str() == Some("-"); + } + + is_valid +} + +fn parse_ref_internal(cmd: &clap_v4::Command, + arg: Option<&clap_v4::Arg>, + value: &std::ffi::OsStr, + check_dir: bool, + check_stdin: bool, + err_str: &str) + -> Result<String, clap_v4::Error> { + let val = value.to_str().unwrap().to_string(); + + let result = std::path::Path::new(&val); + if !check_valid_file_dir_stdin(result, check_dir, check_stdin) { + let mut err = clap_v4::Error::new(clap_v4::error::ErrorKind::ValueValidation).with_cmd(cmd); + if let Some(arg) = arg { + err.insert(clap_v4::error::ContextKind::InvalidArg, + clap_v4::error::ContextValue::String(arg.to_string())); + } + err.insert(clap_v4::error::ContextKind::InvalidValue, + clap_v4::error::ContextValue::String(format!("`{}`: {}", + value.to_string_lossy(), + err_str,))); + Err(err) + } else { + Ok(value.to_str().unwrap().to_string()) + } +} + +/// Validates that a given file is a 'toml' file or contains valid package idents only. +/// +/// Packages to be installed can be read from a 'toml' file or a file containing package idents +/// only. The actual validation of the 'toml' file's contents is performed by the command that +/// calls this validation. This validation will succeed if the file is a 'toml' file (possibly in +/// the wrong format) or if the file contains package identifiers, one per line. +#[derive(Clone)] +pub struct TomlOrPkgIdentFileValueParser; + +use crate::cli::{file_into_idents, + is_toml_file}; + +impl clap_v4::builder::TypedValueParser for TomlOrPkgIdentFileValueParser { + type Value = String; + + fn parse_ref(&self, + cmd: &clap_v4::Command, + arg: Option<&clap_v4::Arg>, + value: &std::ffi::OsStr) + -> Result<Self::Value, clap_v4::Error> { + let val = value.to_str().unwrap().to_string(); + + if is_toml_file(&val) { + return Ok(val); + } + + let result = file_into_idents(&val); + if result.is_err() { + let mut err = + clap_v4::Error::new(clap_v4::error::ErrorKind::ValueValidation).with_cmd(cmd); + if let Some(arg) = arg { + err.insert(clap_v4::error::ContextKind::InvalidArg, + clap_v4::error::ContextValue::String(arg.to_string())); + } + err.insert(clap_v4::error::ContextKind::InvalidValue, + clap_v4::error::ContextValue::String(format!("`{}`: {}", + value.to_string_lossy(), + result.err().unwrap(),))); + Err(err) + } else { + Ok(val) + } + } +} + +/// Validates whether the given input is a valid Package Identifier +/// +/// This validator returns success if the given input is a valid simple Package Identifier or a +/// fully qualified Package Identifier. +/// +/// Use `value_parser = HabPkgIdentValueParser::simple()` for a simple Package Identifier. +/// Use `value_parser = HabPkgIdentValueParser::full()` for a fully qualified Package Identifier.
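+//
+// For instance (hypothetical, illustrative only; not part of this change), a builder-style
+// argument taking a simple 'origin/name' identifier could be declared as:
+//
+// let arg = clap_v4::Arg::new("PKG_IDENT")
+//     .value_parser(HabPkgIdentValueParser::simple());
+//
+// while `value_parser = HabPkgIdentValueParser::full()` would be the derive-attribute form for
+// a fully qualified identifier.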
+#[derive(Clone)] +pub struct HabPkgIdentValueParser { + fully_qualified: bool, +} + +impl HabPkgIdentValueParser { + /// For Simple Package Identifier of the form 'origin/name' + pub fn simple() -> Self { Self { fully_qualified: false, } } + + /// For Full Package Identifier of the form 'origin/name/version/release' + pub fn full() -> Self { Self { fully_qualified: true, } } +} + +use habitat_core::package::ident::{FullyQualifiedPackageIdent, + PackageIdent}; + +impl clap_v4::builder::TypedValueParser for HabPkgIdentValueParser { + type Value = PackageIdent; + + fn parse_ref(&self, + cmd: &clap_v4::Command, + arg: Option<&clap_v4::Arg>, + value: &std::ffi::OsStr) + -> Result<Self::Value, clap_v4::Error> { + let val = value.to_str().unwrap().to_string(); + + let result = if self.fully_qualified { + FullyQualifiedPackageIdent::from_str(&val).err() + } else { + PackageIdent::from_str(&val).err() + }; + + if result.is_some() { + let mut err = + clap_v4::Error::new(clap_v4::error::ErrorKind::ValueValidation).with_cmd(cmd); + if let Some(arg) = arg { + err.insert(clap_v4::error::ContextKind::InvalidArg, + clap_v4::error::ContextValue::String(arg.to_string())); + } + err.insert(clap_v4::error::ContextKind::InvalidValue, + clap_v4::error::ContextValue::String(format!("`{}`: {}", + value.to_string_lossy(), + result.unwrap(),))); + Err(err) + } else { + Ok(val.into()) + } + } +} + +// TODO: Add Unit tests for all validators diff --git a/components/common/src/templating.rs b/components/common/src/templating.rs index 2f1bfa4483..7b44c1a6ee 100644 --- a/components/common/src/templating.rs +++ b/components/common/src/templating.rs @@ -178,7 +178,8 @@ mod test { FS_ROOT_PATH}, package::PackageIdent}, templating::test_helpers::*}; - #[cfg(not(any(all(target_os = "linux", any(target_arch = "x86_64")), + #[cfg(not(any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), all(target_os = "windows", target_arch = "x86_64"),)))] use habitat_core::package::metadata::MetaFile; use std::{collections::BTreeMap, @@ -420,7 +421,8 @@ mod test { let pkg_install = PackageInstall::new_from_parts(pg_id, root.clone(), root.clone(), root.clone()); // Platforms without standard package support require all packages to be native packages - #[cfg(not(any(all(target_os = "linux", any(target_arch = "x86_64")), + #[cfg(not(any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), all(target_os = "windows", target_arch = "x86_64"))))] { tokio::fs::create_dir_all(pkg_install.installed_path()).await diff --git a/components/common/src/templating/config.rs b/components/common/src/templating/config.rs index bfb43e29eb..57ad3f8708 100644 --- a/components/common/src/templating/config.rs +++ b/components/common/src/templating/config.rs @@ -614,7 +614,8 @@ mod test { PackageInstall}}, templating::{context::RenderContext, test_helpers::*}}; - #[cfg(not(any(all(target_os = "linux", any(target_arch = "x86_64")), + #[cfg(not(any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), all(target_os = "windows", target_arch = "x86_64"),)))] use hcore::package::metadata::MetaFile; use std::{env, @@ -1151,7 +1152,8 @@ mod test { "config message is {{cfg.message}}"); // Platforms without standard package support require all packages to be native packages - #[cfg(not(any(all(target_os = "linux", any(target_arch = "x86_64")), + #[cfg(not(any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), all(target_os = "windows", target_arch = "x86_64"))))] {
create_with_content(pkg_dir.join(MetaFile::PackageType.to_string()), "native"); diff --git a/components/common/src/templating/hooks.rs b/components/common/src/templating/hooks.rs index c0abb0a717..6ca15bd45d 100644 --- a/components/common/src/templating/hooks.rs +++ b/components/common/src/templating/hooks.rs @@ -633,7 +633,8 @@ mod tests { context::RenderContext, package::Pkg, test_helpers::*}; - #[cfg(not(any(all(target_os = "linux", any(target_arch = "x86_64")), + #[cfg(not(any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), all(target_os = "windows", target_arch = "x86_64"),)))] use habitat_core::package::metadata::MetaFile; use habitat_core::{package::{PackageIdent, @@ -867,7 +868,8 @@ echo "The message is Hola Mundo" PathBuf::from("/tmp")); // Platforms without standard package support require all packages to be native packages - #[cfg(not(any(all(target_os = "linux", any(target_arch = "x86_64")), + #[cfg(not(any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), all(target_os = "windows", target_arch = "x86_64"))))] { tokio::fs::create_dir_all(pkg_install.installed_path()).await diff --git a/components/core/src/os/net.rs b/components/core/src/os/net.rs index 22e5395444..1d68f04245 100644 --- a/components/core/src/os/net.rs +++ b/components/core/src/os/net.rs @@ -65,6 +65,4 @@ fn test_fqdn_lookup() { fn test_fqdn_lookup_err() { let fqdn = lookup_fqdn(""); assert!(fqdn.is_err(), "Should be an Err()"); - assert_eq!(format!("{}", fqdn.unwrap_err()), - "failed to lookup address information: Name or service not known"); } diff --git a/components/core/src/package/ident.rs b/components/core/src/package/ident.rs index 2af3f1d91b..f05ebf1952 100644 --- a/components/core/src/package/ident.rs +++ b/components/core/src/package/ident.rs @@ -224,6 +224,12 @@ impl FromStr for PackageIdent { } } +impl From<String> for PackageIdent { + fn from(ident: String) -> Self { + Self::from_str(ident.as_str()).expect("Invalid Package Identifier") + } +} + impl PartialOrd for PackageIdent { /// Packages can be compared according to the following: /// diff --git a/components/docs-chef-io/content/habitat/container_orchestration.md b/components/docs-chef-io/content/habitat/container_orchestration.md index 7390e22b52..8267bd6b06 100644 --- a/components/docs-chef-io/content/habitat/container_orchestration.md +++ b/components/docs-chef-io/content/habitat/container_orchestration.md @@ -12,4 +12,4 @@ gh_repo = "habitat" +++ -Chef Habitat packages may be exported with the Supervisor directly into a [variety of container formats]({{< relref "pkg_exports" >}}), but frequently the container is running in a container orchestrator such as Kubernetes or Mesos. Container orchestrators provide scheduling and resource allocation, ensuring workloads are running and available. Containerized Chef Habitat packages can run within these runtimes, managing the applications while the runtimes handle the environment surrounding the application (ie. compute, networking, security). +Chef Habitat packages may be exported with the Supervisor directly into a [variety of container formats]({{< relref "pkg_exports" >}}), but frequently the container is running in a container orchestrator such as Kubernetes. Container orchestrators provide scheduling and resource allocation, ensuring workloads are running and available. Containerized Chef Habitat packages can run within these runtimes, managing the applications while the runtimes handle the environment surrounding the application (i.e.
compute, networking, security). diff --git a/components/docs-chef-io/content/habitat/environment_variables.md b/components/docs-chef-io/content/habitat/environment_variables.md index 0ceeb576b2..d3743a7282 100644 --- a/components/docs-chef-io/content/habitat/environment_variables.md +++ b/components/docs-chef-io/content/habitat/environment_variables.md @@ -32,6 +32,7 @@ This is a list of all environment variables that can be used to modify the opera | `HAB_ORG` | Supervisor | no default | Organization to use when running with [service group encryption]({{< relref "sup_secure" >}}) | `HAB_ORIGIN` | build system | no default | Origin used to build packages. The signing key for this origin is passed to the build system. | | `HAB_ORIGIN_KEYS` | build system | no default | Comma-separated list of origin keys to automatically share with the build system | +| `HAB_REFRESH_CHANNEL` | build system | `stable` | Channel used to retrieve plan dependencies for Chef supported origins. | | `HAB_RING` | Supervisor | no default | The name of the ring used by the Supervisor when running with [wire encryption]({{< relref "sup_secure" >}}) | | `HAB_RING_KEY` | Supervisor | no default | The contents of the ring key when running with [wire encryption]({{< relref "sup_secure" >}}). Useful when running in a container. | | `HAB_STUDIO_SECRET_` | build system | no default | Prefix to allow environment variables into the Studio. The prefix will be removed and your variable will be passed into the Studio at build time. | diff --git a/components/docs-chef-io/content/habitat/install_faq.md b/components/docs-chef-io/content/habitat/install_faq.md index 35523ad62a..7c0910d3bf 100644 --- a/components/docs-chef-io/content/habitat/install_faq.md +++ b/components/docs-chef-io/content/habitat/install_faq.md @@ -39,4 +39,4 @@ If you're staunchly in the anti-curl-bash camp, you can get the latest packages **Q: How do I install `hab` across my server fleet?** -A: For the most part, we leave that up to you. You could just use the aforementioned curl-bash with your provisioner of choice. If your app was dockerized with Chef Habitat then you won't even need to ask this question, because you'll have everything you need inside your container. We are working on first class Mesosphere DC/OS, and Cloud Foundry integrations - which you can keep up to date on in our [Apache Mesos and DC/OS documentation]({{< relref "mesos_dcos" >}}) and [blog](https://www.chef.io/blog). +A: For the most part, we leave that up to you. You could use the aforementioned curl-bash with your provisioner of choice. If your app was dockerized with Chef Habitat, then you won't even need to ask this question because you'll have everything you need inside your container. diff --git a/components/docs-chef-io/content/habitat/mesos_dcos.md b/components/docs-chef-io/content/habitat/mesos_dcos.md deleted file mode 100644 index 92cce4ebbc..0000000000 --- a/components/docs-chef-io/content/habitat/mesos_dcos.md +++ /dev/null @@ -1,88 +0,0 @@ -+++ -title = "Apache Mesos and DC/OS" -description = "Apache Mesos and DC/OS" -gh_repo = "habitat" - -[menu] - [menu.habitat] - title = "Apache Mesos and DC/OS" - identifier = "habitat/containers/mesos-dcos" - parent = "habitat/containers" - weight = 70 -+++ - -[Apache Mesos](https://mesos.apache.org/) is an open source distributed systems kernel and the distributed systems kernel for [Mesosphere's DC/OS](https://dcos.io) distributed platform. 
- -## Mesos Containerizers - -Mesos has support for [containerizers](https://mesos.apache.org/documentation/latest/containerizers/) for running commands and applications within isolated containers. Mesos supports Docker and its own [Mesos containerizer](https://mesos.apache.org/documentation/latest/mesos-containerizer/) format. The Mesos containerizer provides lightweight containerization with `cgroups/namespaces` isolation without actual isolation. The `hab pkg export mesos` command creates a mostly empty base filesystem with the application and the Chef Habitat Supervisor and packages it into a compressed tarball. - -## Marathon Applications - -[Marathon](https://mesosphere.github.io/marathon/) is a container orchestration platform for Mesos and DC/OS, handling the scheduling and deployment of applications. [Marathon applications](https://mesosphere.github.io/marathon/docs/application-basics.html) support Docker and the Mesos container formats, wrapping them in JSON metadata describing the resources needed to deploy the application. Once the application has been deployed to Marathon, it schedules it across the Mesos cluster and ensures the application is running optimally. - -### Export to a Mesos Container and Marathon Application - -You can create native Mesos containers from Chef Habitat packages by following these steps: - -1. Create an interactive studio in any directory with the `hab studio enter` command. - -2. Install or [build]({{< relref "pkg_build" >}}) the Chef Habitat package from which you want to create a Marathon application, for example: - - ```bash - $ hab pkg install yourorigin/yourpackage - ``` - -3. Run the Mesos exporter on the package. - - ```bash - $ hab pkg export mesos yourorigin/yourpackage - ``` - -4. This will create a Mesos container-format tarball in the results directory, and also print the JSON needed to load the application into Marathon. Note that the tarball needs to be uploaded to a download location and the `"uris"` in the JSON need to be updated manually. This is an example of the output: - - ```json - { - "id": "yourorigin/yourpackage", - "cmd": "/bin/id -u hab &>/dev/null || /sbin/useradd hab; /bin/chown -R hab:hab *; mount -t proc proc proc/; mount -t sysfs sys sys/;mount -o bind /dev dev/; /usr/sbin/chroot . ./init.sh start yourorigin/yourpackage", - "cpus": 0.5, - "disk": 0, - "mem": 256, - "instances": 1, - "uris": [ "https://storage.googleapis.com/mesos-habitat/yourorigin/yourpackage-0.0.1-20160611121519.tgz" ] - } - ``` - -5. Note that the default resource allocation for the application is very small: 0.5 units of CPU, no disk, one instance, and 256MB of memory. To change these resource allocations, pass different values to the Mesos exporter as command line options (defaults are documented with `--help`). - -6. From the DC/OS web interface, launch the Marathon Service. - - ![Screen shot of DC/OS Services](/images/habitat/mesos1-services-marathon.png) - -7. Select "Create Application". - - ![Screen shot of Marathon Applications List](/images/habitat/mesos2-new-application.png) - -8. Click on the "JSON Mode" selector and enter the JSON output of the Mesos exporter and click "Create Application". - - ![Screen shot of Marathon New Application JSON Mode](/images/habitat/mesos3-new-application-json.png) - -9. Marathon will then deploy the application and enter the "Running" status. 
- - ![Screen shot of Marathon Application Running](/images/habitat/mesos4-application.png) - -## Debugging - -You can get to the output from the running application by clicking on the "Marathon" service from the DC/OS "Services" tab. Select the application and the "Log Viewer" and choose either the "Error" or "Output" to see `stderr` and `stdout` respectively. If you have SSH access into the nodes, the Mesos container directories are beneath `/var/lib/mesos/slave/slaves`. - ![Screen shot of Debugging a Running Application](/images/habitat/mesos5-debugging.png) - -## Future Enhancements - -This is a basic integration, there are many improvements yet to be made. Here are a few examples: - - * Marathon environment variables are not passed into the Chef Habitat package "cmd" yet. - * Networking ports exposed by Chef Habitat need to be added to the JSON. - * The Chef Habitat gossip protocol needs to be included as a default exposed port. - * If Marathon is running the [artifact store](https://mesosphere.github.io/marathon/docs/), support uploading the tarball directly into it. - * Upload applications directly to the [Marathon application API](https://mesosphere.github.io/marathon/docs/application-basics.html). - * Marathon supports unpacking several archive formats. Native `.hart` support could be added directly to Marathon. diff --git a/components/docs-chef-io/content/habitat/pkg_exports.md b/components/docs-chef-io/content/habitat/pkg_exports.md index e95dc50567..f3be0246f0 100644 --- a/components/docs-chef-io/content/habitat/pkg_exports.md +++ b/components/docs-chef-io/content/habitat/pkg_exports.md @@ -1,6 +1,6 @@ +++ title = "Exporting Packages" -description = "Export Chef Habitat packages to Docker, Kubernetes, Helm, Mesos, DC/OS, Cloud Foundry, or as a tarball " +description = "Export Chef Habitat packages to Docker, Kubernetes, Helm, or as a tar file" gh_repo = "habitat" [menu] @@ -13,7 +13,7 @@ gh_repo = "habitat" Chef Habitat Artifacts--`.hart` files--can be exported in a number of different formats depending on what you need and where you need it. This is powerful because you can use the same immutable Chef Habitat artifact by exporting it into a format that you need for a specific job. -You can export packages into several different external, immutable runtime formats. Currently there are exports for: docker, mesos, tar, and cloudfoundry. +You can export packages to a Docker container image or a tar file. The command to export a package is `hab pkg export `. See the [Chef Habitat CLI Reference Guide]({{< relref "habitat_cli#hab-pkg-export" >}}) for more CLI information. @@ -99,115 +99,3 @@ You can create a Docker container image for any package by performing the follow sudo /hab/bin/hab sup run sudo /hab/bin/hab svc load / ``` - -### Exporting to Apache Mesos and DC/OS - -1. Create an interactive studio in any directory with the `hab studio enter` command. - -2. Install or [build]({{< relref "pkg_build" >}}) the Chef Habitat package from which you want to create a Marathon application, for example: - - ```bash - hab pkg install / - ``` - -3. Run the Mesos exporter on the package. - - ```bash - hab pkg export mesos / - ``` - -4. This will create a Mesos container-format tarball in the results directory, and also print the JSON needed to load the application into Marathon. Note that the tarball needs to be uploaded to a download location and the "uris" in the JSON need to be updated manually. 
This is an example of the output: - - ```json - { "id": "yourorigin/yourpackage", "cmd": "/bin/id -u hab &>/dev/null || /sbin/useradd hab; /bin/chown -R hab:hab *; - mount -t proc proc proc/; mount -t sysfs sys sys/;mount -o bind /dev dev/; /usr/sbin/chroot . ./init.sh start - yourorigin/yourpackage", "cpus": 0.5, "disk": 0, "mem": 256, "instances": 1, "uris": - ["https://storage.googleapis.com/mesos-habitat/yourorigin/yourpackage-0.0.1-20160611121519.tgz" ] } - ``` - -5. Note that the default resource allocation for the application is very small: 0.5 units of CPU, no disk, one instance, and 256MB of memory. To change these resource allocations, pass different values to the Mesos exporter as command line options (defaults are documented with `--help`). - -6. See the [Apaches Mesos and DC/OS documentation]({{< relref "mesos_dcos" >}}) for more information on getting your application running on Mesos. - -### Exporting to Cloud Foundry - -Packages can be exported to run in a [Cloud Foundry platform](https://www.cloudfoundry.org/certified-platforms/) through the use of a Docker image that contains additional layers meant to handle mapping from the Cloud Foundry environment to a Chef Habitat default.toml file. - -#### Setting up Docker Support in Cloud Foundry - -If you have not done so already, you must enable Docker support for Cloud Foundry before you can upload your Cloud Foundry-specific Docker image. - -To do so, make sure you have done the following: - -1. Log in as an Admin user. -2. Enable Docker support on your Cloud Foundry deployment by enabling the `diego_docker` feature flag. - -```bash -cf enable-feature-flag diego_docker -``` - -#### Creating a Mapping File - -The mapping file is a TOML file that can add Bash-interpolated variables and scripts. The Bash code will have access to: - -* all environment variables -* the jq binary -* the helper methods listed below - -Here's an example of a mapping TOML file named `cf-mapping.toml`: - -```toml cf-mapping.toml -secret_key_base = "$SECRET_KEY_BASE" -rails_env = "$RAILS_ENV" -port = ${PORT} - -[db] -user = "$(service "elephantsql" '.credentials.username')" -password = "$(service "elephantsql" '.credentials.password')" -host = "$(service "elephantsql" '.credentials.host')" -name = "$(service "elephantsql" '.credentials.database')" -``` - -#### Helpers - -The helper methods are designed to extract information from the standard Cloud Foundry environment variables [VCAP_SERVICES](https://docs.cloudfoundry.org/devguide/deploy-apps/environment-variable.html#VCAP-SERVICES) and [VCAP_APPLICATION](https://docs.cloudfoundry.org/devguide/deploy-apps/environment-variable.html#VCAP-APPLICATION). - -* `service ` will extract the JSON associated with the given service-name from the `VCAP_SERVICES` environment variable and apply the jq-expression to it. -* `application ` will apply the jq-expression to the `VCAP_APPLICATION` environment variable - -### Exporting and Pushing to a Cloud Foundry Endpoint - -1. Create a mapping.toml file using the format specified above and place that file in your local project repo. - -2. Enter the Studio through `hab studio enter`. - -3. Install or [build]({{< relref "pkg_build" >}}) the package that you want to export. - - ```bash - hab pkg install / - ``` - -4. Run the Cloud Foundry exporter on the package. - - ```bash - hab pkg export cf / /path/to/mapping.toml - ``` - - > **Note** To generate this image, a base Docker image is also created. 
The Cloud Foundry version of the docker image will have `cf-` as a prefix in the image tag. - -5. (Optional) If you are creating a web app that binds to another Cloud Foundry service, such as ElephantSQL, you must have this service enabled in your deployment before running your app. - -6. [Upload your Docker image to a supported registry](https://docs.cloudfoundry.org/devguide/deploy-apps/push-docker.html). Your Docker repository should be match the `origin/package` identifier of your package. - - ```bash - docker push origin/package:cf-version-release - ``` - -7. After your Cloud Foundry Docker image is built, you can deploy it to a Cloud Foundry platform. - - ```bash - cf push APP-NAME --docker-image docker_org/repository - ``` - - Your application will start after it has been successfully uploaded and deployed. - diff --git a/components/docs-chef-io/content/habitat/sup_rings.md b/components/docs-chef-io/content/habitat/sup_rings.md index 5ba5afec1c..ccac6545a4 100644 --- a/components/docs-chef-io/content/habitat/sup_rings.md +++ b/components/docs-chef-io/content/habitat/sup_rings.md @@ -17,15 +17,13 @@ A "Bastion Ring" is a pattern for preventing rumor loss and a split brain in a n ## Using a Scheduler -**Note:** If you are using a container scheduler such as the Kubernetes `kube-scheduler`, Docker Swarm mode, Mesos DC/OS's Marathon or Chronos, or a PaaS such as Cloud Foundry, you should not follow the bastion ring pattern, because the scheduler handles persistence and orchestration on your behalf. +**Note:** If you are using a container scheduler such as the Kubernetes `kube-scheduler` or Docker Swarm mode, you should not follow the bastion ring pattern, because the scheduler handles persistence and orchestration on your behalf. More resources on schedulers: - [Chef Habitat Container Orchestration]({{< relref "container_orchestration" >}}) - Kubernetes `kube-scheduler`: https://kubernetes.io/docs/concepts/scheduling-eviction/kube-scheduler - Docker Swarm mode: https://docs.docker.com/engine/swarm -- Mesos DC/OS Marathon or Chronos: https://mesosphere.github.io/marathon/ -- Cloud Foundry: https://www.habitat.sh/get-started/cloudfoundry ## Initial Peer(s) diff --git a/components/hab/Cargo.toml b/components/hab/Cargo.toml index c7048cf0a8..e7633ba440 100644 --- a/components/hab/Cargo.toml +++ b/components/hab/Cargo.toml @@ -15,8 +15,6 @@ doc = false base64 = "*" bitflags = "*" chrono = {version = "*", features = ["serde"]} -clap = { git = "https://github.com/habitat-sh/clap.git", branch = "v2-master", features = [ "suggestions", "color", "unstable" ] } -configopt = { git = "https://github.com/habitat-sh/configopt.git" } ctrlc = "*" dirs = "*" env_logger = "*" @@ -42,7 +40,6 @@ same-file = "*" serde = { version = "*", features = ["derive"] } serde_json = { version = "*", features = [ "preserve_order" ] } serde_yaml = "*" -structopt = { git = "https://github.com/habitat-sh/structopt.git" } tabwriter = "*" tar = "*" termcolor = "*" @@ -55,6 +52,13 @@ walkdir = "*" rustls-webpki = { version = "*", features = ["alloc"] } tempfile = "*" + +clap = { git = "https://github.com/habitat-sh/clap.git", branch = "v2-master", features = [ "suggestions", "color", "unstable" ] , optional = true} +configopt = { git = "https://github.com/habitat-sh/configopt.git" , optional = true} +structopt = { git = "https://github.com/habitat-sh/structopt.git" , optional = true} + +clap_v4 = { version = "4", package = "clap", features = ["env", "derive", "string", 
"wrap_help"], optional = true } + [dependencies.uuid] version = "*" features = ["v4"] @@ -65,6 +69,8 @@ winapi = { version = "^0.3", features = ["winuser", "windef"] } winreg = "*" [features] -default = ["supported_targets"] +v2 = [ "clap", "configopt", "structopt" ] +v4 = [ "supported_targets", "clap_v4" ] +default = ["supported_targets", "v2"] functional = [] supported_targets = ["habitat_core/supported_targets"] diff --git a/components/hab/habitat/plan.sh b/components/hab/habitat/plan.sh index daa15efe6e..e9d25f2831 100644 --- a/components/hab/habitat/plan.sh +++ b/components/hab/habitat/plan.sh @@ -53,7 +53,15 @@ do_before() { do_prepare() { _common_prepare - export rustc_target="x86_64-unknown-linux-musl" + # With the musl target, the ring crate is looking for aarch64-linux-musl-gcc, + # but the core/musl package provides musl-gcc. This workaround is necessary until the appropriate changes are made to core/musl for aarch64. + if [[ "${pkg_target%%-*}" == "aarch64" ]]; then + if [[ ! -r "$(pkg_path_for musl)/bin/aarch64-linux-musl-gcc" ]]; then + ln -sv "$(pkg_path_for musl)/bin/musl-gcc" "$(pkg_path_for musl)/bin/aarch64-linux-musl-gcc" + fi + fi + + export rustc_target="${pkg_target%%-*}-unknown-linux-musl" build_line "Setting rustc_target=$rustc_target" # Used to find libgcc_s.so.1 when compiling `build.rs` in dependencies. Since @@ -79,12 +87,12 @@ do_prepare() { do_build() { pushd "$SRC_PATH" > /dev/null || exit - cargo build ${build_type#--debug} --target=$rustc_target --verbose + cargo build ${build_type#--debug} --target="$rustc_target" --verbose popd > /dev/null || exit } do_install() { - install -v -D "$CARGO_TARGET_DIR"/$rustc_target/${build_type#--}/$bin \ + install -v -D "$CARGO_TARGET_DIR"/"$rustc_target"/${build_type#--}/$bin \ "$pkg_prefix"/bin/$bin } diff --git a/components/hab/src/cli.rs b/components/hab/src/cli.rs index 0a631672ec..55a9ebeaa2 100644 --- a/components/hab/src/cli.rs +++ b/components/hab/src/cli.rs @@ -11,10 +11,7 @@ use habitat_common::{cli::{file_into_idents, use habitat_core::{origin::Origin as CoreOrigin, package::{Identifiable, PackageIdent}}; -use serde::{Deserialize, - Serialize}; -use std::{fmt, - path::Path, +use std::{path::Path, result, str::FromStr}; use structopt::StructOpt; @@ -23,45 +20,9 @@ use structopt::StructOpt; /// ran to completion with a successful result. The Launcher should not attempt to restart /// the Supervisor and should exit immediately with a successful exit code. 
pub const OK_NO_RETRY_EXCODE: i32 = 84; -pub const AFTER_HELP: &str = - "\nALIASES:\n apply Alias for: 'config apply'\n install Alias for: 'pkg \ - install'\n run Alias for: 'sup run'\n setup Alias for: 'cli setup'\n \ - start Alias for: 'svc start'\n stop Alias for: 'svc stop'\n term \ - Alias for: 'sup term'\n"; pub fn get(_feature_flags: FeatureFlag) -> App<'static, 'static> { Hab::clap() } -//////////////////////////////////////////////////////////////////////// - -#[derive(Clone, Copy, Debug, Eq, PartialEq, Deserialize, Serialize)] -pub enum KeyType { - Public, - Secret, -} - -impl FromStr for KeyType { - type Err = crate::error::Error; - - fn from_str(value: &str) -> result::Result<Self, Self::Err> { - match value { - "public" => Ok(Self::Public), - "secret" => Ok(Self::Secret), - _ => Err(Self::Err::KeyTypeParseError(value.to_string())), - } - } -} - -impl fmt::Display for KeyType { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - KeyType::Public => write!(f, "public"), - KeyType::Secret => write!(f, "secret"), - } - } -} - -//////////////////////////////////////////////////////////////////////// - pub fn parse_optional_arg<T: FromStr>(name: &str, m: &ArgMatches) -> Option<T> where <T as FromStr>::Err: std::fmt::Debug { @@ -147,20 +108,20 @@ mod tests { "sup", "run", "--application", - "--environment=env"]); + "--environment=env",]); assert!(r.is_ok()); let r = get(no_feature_flags()).get_matches_from_safe(vec!["hab", "svc", "load", "--application=app", "--environment", - "pkg/ident"]); + "pkg/ident",]); assert!(r.is_ok()); let r = get(no_feature_flags()).get_matches_from_safe(vec!["hab", "svc", "load", "--application", - "pkg/ident"]); + "pkg/ident",]); assert!(r.is_ok()); } diff --git a/components/hab/src/cli/hab.rs b/components/hab/src/cli/hab.rs index 1287f2fb30..1f77922b00 100644 --- a/components/hab/src/cli/hab.rs +++ b/components/hab/src/cli/hab.rs @@ -10,10 +10,12 @@ pub mod ring; pub mod studio; pub mod sup; pub mod svc; -#[cfg(test)] -mod tests; pub mod user; pub mod util; + +#[cfg(test)] +mod tests; + #[cfg(any(target_os = "macos", any(all(target_os = "linux", any(target_arch = "x86_64", target_arch = "aarch64")), @@ -57,7 +59,7 @@ use self::{bldr::*, UserKeyGenerate}, util::{CacheKeyPath, ConfigOptCacheKeyPath}}; -use crate::{cli::AFTER_HELP, +use crate::{AFTER_HELP, VERSION}; use configopt::ConfigOpt; use structopt::{clap::AppSettings, diff --git a/components/hab/src/cli/hab/origin.rs b/components/hab/src/cli/hab/origin.rs index 94e41a2338..c32e401125 100644 --- a/components/hab/src/cli/hab/origin.rs +++ b/components/hab/src/cli/hab/origin.rs @@ -6,8 +6,8 @@ use super::util::{AuthToken, ConfigOptBldrOrigin, ConfigOptBldrUrl, ConfigOptCacheKeyPath}; -use crate::cli::{valid_origin, - KeyType}; +use crate::{cli::valid_origin, + key_type::KeyType}; use configopt::ConfigOpt; use habitat_core::origin::OriginMemberRole; use serde::Serialize; diff --git a/components/hab/src/cli/hab/pkg.rs b/components/hab/src/cli/hab/pkg.rs index d6037be917..4c00e5d9da 100644 --- a/components/hab/src/cli/hab/pkg.rs +++ b/components/hab/src/cli/hab/pkg.rs @@ -63,7 +63,7 @@ pub struct PkgPath { /// Displays the binds for a service #[derive(ConfigOpt, StructOpt)] -#[structopt(name = "binds", no_version)] +#[structopt(name = "binds", no_version, settings = &[AppSettings::ArgRequiredElseHelp])] pub struct PkgBinds { #[structopt(flatten)] pkg_ident: PkgIdent, @@ -541,18 +541,12 @@ pub struct PkgInstall { #[derive(ConfigOpt, StructOpt)] #[structopt(name = "export", aliases = &["e", "ex", "exp", "expo", "expor"],
no_version)] pub enum ExportCommand { -#[cfg(target_os = "linux")] - /// Cloud Foundry exporter - Cf(ExternalCommandArgs), #[cfg(any(target_os = "linux", target_os = "windows"))] /// Container exporter Container(ExternalCommandArgs), #[cfg(any(target_os = "linux", target_os = "windows"))] #[structopt(settings = &[AppSettings::Hidden])] Docker(ExternalCommandArgs), - /// Mesos exporter - #[cfg(target_os = "linux")] - Mesos(ExternalCommandArgs), /// Tar exporter #[cfg(any(target_os = "linux", target_os = "windows"))] Tar(ExternalCommandArgs), diff --git a/components/hab/src/cli/hab/tests.rs b/components/hab/src/cli/hab/tests.rs index 0b2338f5d5..4e40fca7d4 100644 --- a/components/hab/src/cli/hab/tests.rs +++ b/components/hab/src/cli/hab/tests.rs @@ -439,12 +439,14 @@ fn test_hab_svc_load_flag_ordering() { assert!(!load.force); assert_eq!(load.pkg_ident.pkg_ident(), pkg_ident); - let hab = Hab::try_from_iter_with_configopt(&["hab", "svc", "load", "--force", "core/redis"]).unwrap(); + let hab = Hab::try_from_iter_with_configopt(&["hab", "svc", "load", "--force", "core/redis"]) + .unwrap(); let load = extract_hab_svc_load(hab); assert!(load.force); assert_eq!(load.pkg_ident.pkg_ident(), pkg_ident); - let hab = Hab::try_from_iter_with_configopt(&["hab", "svc", "load", "core/redis", "--force"]).unwrap(); + let hab = Hab::try_from_iter_with_configopt(&["hab", "svc", "load", "core/redis", "--force"]) + .unwrap(); let load = extract_hab_svc_load(hab); assert!(load.force); assert_eq!(load.pkg_ident.pkg_ident(), pkg_ident); @@ -452,7 +454,8 @@ #[test] fn test_hab_svc_update_empty_binds() { - let hab = Hab::try_from_iter_with_configopt(&["hab", "svc", "update", "core/redis", "--bind"]).unwrap(); + let hab = Hab::try_from_iter_with_configopt(&["hab", "svc", "update", "core/redis", "--bind"]) + .unwrap(); let update = extract_hab_svc_update(hab); assert_eq!(update.bind, Some(vec![])); diff --git a/components/hab/src/cli_v4.rs b/components/hab/src/cli_v4.rs new file mode 100644 index 0000000000..654b6efb81 --- /dev/null +++ b/components/hab/src/cli_v4.rs @@ -0,0 +1,141 @@ +use clap_v4 as clap; + +use clap::Parser; + +use habitat_common::{ui::UI, + FeatureFlag}; + +use crate::{error::Result as HabResult, + AFTER_HELP, + VERSION}; + +mod pkg; +use pkg::PkgCommand; + +mod utils; +use utils::CacheKeyPath; + +#[derive(Debug, Clone, Parser)] +#[command(name = "hab", + version = VERSION, + about = "Patents: https://chef.io/patents\n\"A Habitat is the natural environment for your services\" - Alan Turing", + author = "\nThe Habitat Maintainers <humans@habitat.sh>", + after_help = AFTER_HELP, + arg_required_else_help = true, + propagate_version = true, + help_template = "{name} {version} {author-section} {about-section} \ + \n{usage-heading} {usage}\n\n{all-args}\n{after-help}\n", + )] +enum Hab { + /// Commands relating to Habitat Builder + Bldr(BldrCommand), + + /// Commands relating to Habitat runtime config + Cli(CliCommand), + + Config(ConfigCommand), + + File(FileCommand), + + License(LicenseCommand), + + Origin(OriginCommand), + + /// Commands relating to Habitat packages + #[clap(subcommand)] + Pkg(PkgCommand), + + Plan(PlanCommand), + + Ring(RingCommand), + + Studio(StudioCommand), + + Sup(SupCommand), + + SupportBundle, + + Svc(SvcCommand), + + User(UserCommand), + + // Aliases Below + Apply(ServiceConfigCommand), + + Install(PkgInstallCommand), + + Run(SupRunCommand), + + Setup(CacheKeyPath), + + Start(SvcStartCommand), + + Stop(SvcStopCommand), + + Term, +} + +impl Hab { + async fn
do_cli_command(&self, ui: &mut UI, feature_flags: FeatureFlag) -> HabResult<()> { + match self { + Self::Pkg(pkg_command) => pkg_command.do_command(ui, feature_flags).await, + _ => todo!(), + } + } +} + +#[derive(Clone, Debug, Parser)] +pub(crate) struct BldrCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct CliCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct ConfigCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct FileCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct LicenseCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct OriginCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct PlanCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct RingCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct StudioCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct SupCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct SvcCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct UserCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct ServiceConfigCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct PkgInstallCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct SupRunCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct SvcStartCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct SvcStopCommand; + +pub async fn cli_driver(ui: &mut UI, feature_flags: FeatureFlag) -> HabResult<()> { + let cli = Hab::parse(); + cli.do_cli_command(ui, feature_flags).await +} diff --git a/components/hab/src/cli_v4/pkg.rs b/components/hab/src/cli_v4/pkg.rs new file mode 100644 index 0000000000..dc3eadf282 --- /dev/null +++ b/components/hab/src/cli_v4/pkg.rs @@ -0,0 +1,190 @@ +// Implementation of `hab pkg` command + +use clap_v4 as clap; + +use clap::Subcommand; + +use habitat_common::{ui::UI, + FeatureFlag}; + +use crate::error::Result as HabResult; + +mod binds; +mod binlink; +mod build; +mod bulk_upload; + +mod channels; +mod config; + +mod delete; +mod demote; +mod dependencies; +mod download; + +mod env; +mod exec; + +#[cfg(any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), + all(target_os = "windows", target_arch = "x86_64")))] +mod export; + +mod hash; +mod header; + +mod info; +mod install; + +mod list; + +mod uninstall; +mod upload; + +mod path; +mod promote; +mod provides; + +mod search; +mod sign; + +mod verify; + +#[derive(Clone, Debug, Subcommand)] +#[command(author = "\nThe Habitat Maintainers ", + arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(super) enum PkgCommand { + /// Displays the binds for a service + Binds(binds::PkgBindsOptions), + + /// Creates a binlink for a package binary in a common 'PATH' location + Binlink(binlink::PkgBinlinkOptions), + + /// Builds a plan using Habitat Studio + Build(build::PkgBuildOptions), + + /// Bulk uploads Habitat artifacts to a depot from a local directory + Bulkupload(bulk_upload::PkgBulkUploadOptions), + + /// Find out what channels a package belongs to + Channels(channels::PkgChannelsOptions), + + /// Displays the default configuration options for a service + Config(config::PkgConfigOptions), + + /// Removes a package from Builder + Delete(delete::PkgDeleteOptions), + + /// Demote a package from a specified channel + Demote(demote::PkgDemoteOptions), + + /// Returns Habitat Artifact dependencies, by default the direct dependencies + /// of the package +
Dependencies(dependencies::PkgDependenciesOptions), + + /// Download Habitat artifacts (including dependencies and keys) from Builder + Download(download::PkgDownloadOptions), + + /// Prints the runtime environment of a specific installed package + Env(env::PkgEnvOptions), + + /// Execute a command using the 'PATH' context of an installed package + Exec(exec::PkgExecOptions), + + #[cfg(any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), + all(target_os = "windows", target_arch = "x86_64")))] + #[clap(subcommand)] + /// Exports the package to the specified format + Export(export::PkgExportCommand), + + /// Generates a blake2b hashsum from a target at any given filepath + Hash(hash::PkgHashOptions), + + /// Returns the Habitat Artifact header + Header(header::PkgHeaderOptions), + + /// Returns the Habitat Artifact information + Info(info::PkgInfoOptions), + + /// Installs a Habitat package from Builder or locally from a Habitat Artifact + Install(install::PkgInstallOptions), + + /// List all versions of installed packages + List(list::PkgListOptions), + + /// Prints the path to a specific installed release of a package + Path(path::PkgPathOptions), + + /// Promote a package to a specified channel + Promote(promote::PkgPromoteOptions), + + /// Search installed Habitat packages for a given file + Provides(provides::PkgProvidesOptions), + + /// Search for a package in Builder + Search(search::PkgSearchOptions), + + /// Signs an archive with an origin key, generating a Habitat Artifact + Sign(sign::PkgSignOptions), + + /// Safely uninstall a package and dependencies from a local filesystem + Uninstall(uninstall::PkgUninstallOptions), + + /// Uploads a local Habitat Artifact to Builder + Upload(upload::PkgUploadOptions), + + /// Verifies a Habitat Artifact with an origin key + Verify(verify::PkgVerifyOptions), +} + +impl PkgCommand { + pub(crate) async fn do_command(&self, + ui: &mut UI, + feature_flags: FeatureFlag) + -> HabResult<()> { + match self { + Self::Binds(opts) => opts.do_binds(), + Self::Binlink(opts) => opts.do_binlink(ui), + Self::Build(opts) => opts.do_build(ui, feature_flags).await, + Self::Bulkupload(opts) => opts.do_bulkupload(ui).await, + + Self::Channels(opts) => opts.do_channels(ui).await, + Self::Config(opts) => opts.do_config(), + + Self::Delete(opts) => opts.do_delete(ui).await, + Self::Demote(opts) => opts.do_demote(ui).await, + Self::Dependencies(opts) => opts.do_dependencies(), + Self::Download(opts) => opts.do_download(ui).await, + + Self::Env(opts) => opts.do_env(), + Self::Exec(opts) => opts.do_exec(), + #[cfg(any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), + all(target_os = "windows", target_arch = "x86_64")))] + Self::Export(cmd) => cmd.do_export(ui).await, + + Self::Hash(opts) => opts.do_hash(), + Self::Header(opts) => opts.do_header(ui), + + Self::Info(opts) => opts.do_info(ui), + Self::Install(opts) => opts.do_install(ui, feature_flags).await, + + Self::List(opts) => opts.do_list(), + + Self::Path(opts) => opts.do_path(), + Self::Promote(opts) => opts.do_promote(ui).await, + Self::Provides(opts) => opts.do_provides(), + + Self::Search(opts) => opts.do_search().await, + Self::Sign(opts) => opts.do_sign(ui), + + Self::Uninstall(opts) => opts.do_uninstall(ui).await, + Self::Upload(opts) => opts.do_upload(ui).await, + + Self::Verify(opts) => opts.do_verify(ui), + } + } +} diff --git a/components/hab/src/cli_v4/pkg/binds.rs b/components/hab/src/cli_v4/pkg/binds.rs new file mode 100644 index 
0000000000..89d22823a8 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/binds.rs @@ -0,0 +1,29 @@ +// Implementation of `hab pkg binds` + +use clap_v4 as clap; + +use clap::Parser; + +use habitat_core::{fs::FS_ROOT_PATH, + package::PackageIdent}; + +use habitat_common::{cli::clap_validators::HabPkgIdentValueParser, + command::package::binds}; + +use crate::error::Result as HabResult; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgBindsOptions { + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: PackageIdent, +} + +impl PkgBindsOptions { + pub(super) fn do_binds(&self) -> HabResult<()> { + binds::start(&self.pkg_ident, &*FS_ROOT_PATH).map_err(Into::into) + } +} diff --git a/components/hab/src/cli_v4/pkg/binlink.rs b/components/hab/src/cli_v4/pkg/binlink.rs new file mode 100644 index 0000000000..4dc9e768d3 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/binlink.rs @@ -0,0 +1,64 @@ +// Implementation of `hab pkg binlink` + +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::{ArgAction, + Parser}; + +use habitat_core::{fs::FS_ROOT_PATH, + package::PackageIdent}; + +use habitat_common::{cli::{clap_validators::HabPkgIdentValueParser, + BINLINK_DIR_ENVVAR, + DEFAULT_BINLINK_DIR}, + ui::UI}; + +use crate::{command::pkg::binlink, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgBinlinkOptions { + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: PackageIdent, + + /// The command to binlink (ex: bash) + #[arg(name = "BINARY")] + binary: Option<String>, + + /// Set the destination directory + #[arg(name = "DEST_DIR", + short = 'd', + long = "dest", + env = BINLINK_DIR_ENVVAR, + default_value = DEFAULT_BINLINK_DIR)] + dest_dir: PathBuf, + + /// Overwrite existing binlinks + #[arg(name = "FORCE", short = 'f', long = "force", action = ArgAction::SetTrue)] + force: bool, +} + +impl PkgBinlinkOptions { + pub(super) fn do_binlink(&self, ui: &mut UI) -> HabResult<()> { + if let Some(binary) = &self.binary { + binlink::start(ui, + &self.pkg_ident, + &binary, + &self.dest_dir, + &FS_ROOT_PATH, + self.force) + } else { + binlink::binlink_all_in_pkg(ui, + &self.pkg_ident, + &self.dest_dir, + &FS_ROOT_PATH, + self.force) + } + } +} diff --git a/components/hab/src/cli_v4/pkg/build.rs b/components/hab/src/cli_v4/pkg/build.rs new file mode 100644 index 0000000000..7e6a43c566 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/build.rs @@ -0,0 +1,133 @@ +// Implementation of `hab pkg build` +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::{ArgAction, + Parser}; + +use habitat_common::ui::UI; + +use habitat_common::FeatureFlag; + +use habitat_core::{crypto, + crypto::keys::KeyCache, + origin::Origin}; + +use crate::{command::pkg::build, + error::Result as HabResult}; + +#[cfg(target_os = "linux")] +use crate::error::Error as HabError; + +use crate::cli_v4::utils::CacheKeyPath; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section}
{about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgBuildOptions { + // TODO: Should multiple Origins be supported? The semantics suggest so, but the original + // v2 code does not seem to support it. + /// Installs secret origin keys (ex: "unicorn", "acme,other,acme-ops") + #[arg(name = "HAB_ORIGIN_KEYS", short = 'k', long = "keys", action = ArgAction::Append)] + hab_origin_keys: Vec<Origin>, + + // TODO: Make it a more semantic `PathBuf`. Currently not done due to limitation of + // `command::pkg::build`. Revisit it after removing `clap-v2` + /// Sets the Studio root (default: /hab/studios/<DIR_NAME>) + #[arg(name = "HAB_STUDIO_ROOT", short = 'r', long = "root")] + hab_studio_root: Option<String>, + + // TODO: Same as above + /// Sets the source path [default: $PWD] + #[arg(name = "SRC_PATH", short = 's', long = "src")] + src_path: Option<String>, + + // TODO : Same as above + /// A directory containing a plan file or a `habitat/` directory which contains the plan + /// file + #[arg(name = "PLAN_CONTEXT")] + plan_context: String, + + #[command(flatten)] + cache_key_path: CacheKeyPath, + + #[cfg(target_os = "linux")] + /// Build a native package on the host system without a studio + #[arg(name = "NATIVE_PACKAGE", short = 'N', long = "native-package", conflicts_with_all = &["REUSE", "DOCKER"])] + native_package: bool, + + #[cfg(any(target_os = "linux", target_os = "windows"))] + /// Reuses a previous Studio for the build (default: clean up before building) + // Only a truly native/local Studio can be reused--the Docker implementation will always be + // ephemeral + #[arg(name = "REUSE", short = 'R', long = "reuse", action = ArgAction::SetTrue)] + reuse: bool, + + #[cfg(any(target_os = "linux", target_os = "windows"))] + /// Uses a Dockerized Studio for the build + #[arg(name = "DOCKER", short = 'D', long = "docker", action = ArgAction::SetTrue)] + docker: bool, + + /// Channel used to retrieve plan dependencies for Chef supported origins + #[arg(name = "REFRESH_CHANNEL", + short = 'f', + long = "refresh-channel", + env = "HAB_REFRESH_CHANNEL", + default_value = "stable")] + refresh_channel: Option<String>, +} + +impl PkgBuildOptions { + // Required because of lot of `cfg`... + #[allow(unused_variables)] + pub(super) async fn do_build(&self, ui: &mut UI, feature_flags: FeatureFlag) -> HabResult<()> { + if !self.hab_origin_keys.is_empty() { + crypto::init()?; + let key_cache = KeyCache::new::<PathBuf>((&self.cache_key_path).into()); + for origin in self.hab_origin_keys.iter() { + // Validate that a secret signing key is present on disk + // for each origin.
+ key_cache.latest_secret_origin_signing_key(origin)?; + } + } + + let native_package = self.should_build_native_package(feature_flags)?; + + #[cfg(not(any(target_os = "linux", target_os = "windows")))] + let (reuse_flag, docker_flag) = (false, false); + + #[cfg(any(target_os = "linux", target_os = "windows"))] + let (reuse_flag, docker_flag) = (self.reuse, self.docker); + + build::start(ui, + self.plan_context.as_ref(), + self.hab_studio_root.as_deref(), + self.src_path.as_deref(), + &self.hab_origin_keys, + native_package, + reuse_flag, + docker_flag, + self.refresh_channel.as_deref()).await + } + + #[cfg(target_os = "linux")] + fn should_build_native_package(&self, feature_flags: FeatureFlag) -> HabResult<bool> { + if self.native_package { + if !feature_flags.contains(FeatureFlag::NATIVE_PACKAGE_SUPPORT) { + return Err(HabError::ArgumentError(String::from("`--native-package` is only \ + available when \ + `HAB_FEAT_NATIVE_PACKAGE_SUPPORT` \ + is set"))); + } + Ok(true) + } else { + Ok(false) + } + } + + #[cfg(not(target_os = "linux"))] + fn should_build_native_package(&self, _: FeatureFlag) -> HabResult<bool> { Ok(false) } +} diff --git a/components/hab/src/cli_v4/pkg/bulk_upload.rs b/components/hab/src/cli_v4/pkg/bulk_upload.rs new file mode 100644 index 0000000000..aa6367f6ff --- /dev/null +++ b/components/hab/src/cli_v4/pkg/bulk_upload.rs @@ -0,0 +1,85 @@ +// Implementation of `hab pkg bulkupload` + +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::{ArgAction, + Parser}; + +use habitat_common::{cli::clap_validators::DirExistsValueParser, + ui::UI}; + +use habitat_core::{crypto::keys::KeyCache, + ChannelIdent}; + +use habitat_api_client::BuildOnUpload; + +use crate::{cli_v4::utils::{AuthToken, + BldrUrl}, + command::pkg::bulkupload, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgBulkUploadOptions { + #[command(flatten)] + bldr_url: BldrUrl, + + #[command(flatten)] + auth_token: AuthToken, + + /// Optional additional release channel to upload package to. Packages are always uploaded + /// to `unstable`, regardless of the value of this option + #[arg(name = "CHANNEL", short = 'c', long = "channel")] + channel: Option<ChannelIdent>, + + /// Skip checking availability of package and force uploads, potentially overwriting a + /// stored copy of a package + #[arg(name = "FORCE", long = "force", action = ArgAction::SetTrue)] + force: bool, + + // TODO: This option is to be removed? + /// Enable auto-build for all packages in this upload. Only applicable to SaaS Builder + #[arg(name = "AUTO_BUILD", long = "auto-build", action = ArgAction::SetTrue)] + auto_build: bool, + + /// Skip the confirmation prompt and automatically create origins that do not exist in the + /// target Builder + #[arg(name = "AUTO_CREATE_ORIGINS", long = "auto-create-origins", action = ArgAction::SetTrue)] + auto_create_origins: bool, + + // TODO: Add Path Exists validator.
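+ // A sketch of the expected layout (hypothetical names): `do_bulkupload` below + // joins "artifacts" and "keys" onto this directory, so `.hart` files are read + // from UPLOAD_DIRECTORY/artifacts/ and origin keys from UPLOAD_DIRECTORY/keys/.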
+ /// Directory Path from which artifacts will be uploaded + #[arg(name = "UPLOAD_DIRECTORY", value_parser = DirExistsValueParser)] + upload_directory: PathBuf, +} + +impl PkgBulkUploadOptions { + pub(super) async fn do_bulkupload(&self, ui: &mut UI) -> HabResult<()> { + let artifact_path = self.upload_directory.join("artifacts"); + let key_path = self.upload_directory.join("keys"); + let key_cache = KeyCache::new(key_path); + key_cache.setup()?; + + let auto_build = if self.auto_build { + BuildOnUpload::PackageDefault + } else { + BuildOnUpload::Disable + }; + + let auth_token = self.auth_token.from_cli_or_config()?; + + bulkupload::start(ui, + &self.bldr_url.to_string(), + &self.channel, + &auth_token, + &artifact_path, + self.force, + auto_build, + self.auto_create_origins, + &key_cache).await + } +} diff --git a/components/hab/src/cli_v4/pkg/channels.rs b/components/hab/src/cli_v4/pkg/channels.rs new file mode 100644 index 0000000000..93182a84f0 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/channels.rs @@ -0,0 +1,57 @@ +// Implementation of `hab pkg channels` command + +use clap_v4 as clap; + +use clap::Parser; + +use habitat_common::{cli::{clap_validators::HabPkgIdentValueParser, + PACKAGE_TARGET_ENVVAR}, + ui::UI}; + +use habitat_core::package::{target, + PackageIdent, + PackageTarget}; + +use crate::{cli_v4::utils::{AuthToken, + BldrUrl}, + command::pkg::channels, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgChannelsOptions { + #[command(flatten)] + bldr_url: BldrUrl, + + /// A fully qualified package identifier (ex: core/busybox-static/1.42.2/20170513215502) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::full())] + pkg_ident: PackageIdent, + + /// A package target (ex: x86_64-windows) (default: system appropriate target) + #[arg(name = "PKG_TARGET", env = PACKAGE_TARGET_ENVVAR)] + pkg_target: Option<PackageTarget>, + + #[command(flatten)] + auth_token: AuthToken, +} + +impl PkgChannelsOptions { + pub(super) async fn do_channels(&self, ui: &mut UI) -> HabResult<()> { + let auth_token = self.auth_token.try_from_cli_or_config(); + + let target = self.pkg_target.unwrap_or_else(|| { + match PackageTarget::active_target() { + #[cfg(feature = "supported_targets")] + target::X86_64_DARWIN => target::X86_64_LINUX, + t => t, + } + }); + + channels::start(ui, + &self.bldr_url.to_string(), + (&self.pkg_ident, target), + auth_token.as_deref()).await + } +} diff --git a/components/hab/src/cli_v4/pkg/config.rs b/components/hab/src/cli_v4/pkg/config.rs new file mode 100644 index 0000000000..a90d08d8ce --- /dev/null +++ b/components/hab/src/cli_v4/pkg/config.rs @@ -0,0 +1,29 @@ +// Implementation of `hab pkg config` + +use clap_v4 as clap; + +use clap::Parser; + +use habitat_core::{fs::FS_ROOT_PATH, + package::PackageIdent}; + +use habitat_common::{cli::clap_validators::HabPkgIdentValueParser, + command::package::config}; + +use crate::error::Result as HabResult; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgConfigOptions { + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: PackageIdent, +} + +impl
PkgConfigOptions { + pub(super) fn do_config(&self) -> HabResult<()> { + config::start(&self.pkg_ident, &*FS_ROOT_PATH).map_err(Into::into) + } +} diff --git a/components/hab/src/cli_v4/pkg/delete.rs b/components/hab/src/cli_v4/pkg/delete.rs new file mode 100644 index 0000000000..0734f0cef0 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/delete.rs @@ -0,0 +1,57 @@ +// Implementation of `hab pkg delete` command + +use clap_v4 as clap; + +use clap::Parser; + +use habitat_common::{cli::{clap_validators::HabPkgIdentValueParser, + PACKAGE_TARGET_ENVVAR}, + ui::UI}; + +use habitat_core::package::{target, + PackageIdent, + PackageTarget}; + +use crate::{cli_v4::utils::{AuthToken, + BldrUrl}, + command::pkg::delete, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgDeleteOptions { + #[command(flatten)] + bldr_url: BldrUrl, + + /// A fully qualified package identifier (ex: core/busybox-static/1.42.2/20170513215502) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::full())] + pkg_ident: PackageIdent, + + /// A package target (ex: x86_64-windows) (default: system appropriate target) + #[arg(name = "PKG_TARGET", env = PACKAGE_TARGET_ENVVAR)] + pkg_target: Option<PackageTarget>, + + #[command(flatten)] + auth_token: AuthToken, +} + +impl PkgDeleteOptions { + pub(super) async fn do_delete(&self, ui: &mut UI) -> HabResult<()> { + let auth_token = self.auth_token.from_cli_or_config()?; + + let target = self.pkg_target.unwrap_or_else(|| { + match PackageTarget::active_target() { + #[cfg(feature = "supported_targets")] + target::X86_64_DARWIN => target::X86_64_LINUX, + t => t, + } + }); + + delete::start(ui, + &self.bldr_url.to_string(), + (&self.pkg_ident, target), + &auth_token).await + } +} diff --git a/components/hab/src/cli_v4/pkg/demote.rs b/components/hab/src/cli_v4/pkg/demote.rs new file mode 100644 index 0000000000..b7f2fece67 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/demote.rs @@ -0,0 +1,63 @@ +// Implementation of `hab pkg demote` command + +use clap_v4 as clap; + +use clap::Parser; + +use habitat_common::{cli::{clap_validators::HabPkgIdentValueParser, + PACKAGE_TARGET_ENVVAR}, + ui::UI}; + +use habitat_core::{package::{target, + PackageIdent, + PackageTarget}, + ChannelIdent}; + +use crate::{cli_v4::utils::{AuthToken, + BldrUrl}, + command::pkg::demote, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgDemoteOptions { + #[command(flatten)] + bldr_url: BldrUrl, + + /// A fully qualified package identifier (ex: core/busybox-static/1.42.2/20170513215502) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::full())] + pkg_ident: PackageIdent, + + /// Demote from the specified release channel + #[arg(name = "CHANNEL")] + channel: ChannelIdent, + + /// A package target (ex: x86_64-windows) (default: system appropriate target) + #[arg(name = "PKG_TARGET", env = PACKAGE_TARGET_ENVVAR)] + pkg_target: Option<PackageTarget>, + + #[command(flatten)] + auth_token: AuthToken, +} + +impl PkgDemoteOptions { + pub(super) async fn do_demote(&self, ui: &mut UI) -> HabResult<()> { + let auth_token = self.auth_token.from_cli_or_config()?; + + let target = self.pkg_target.unwrap_or_else(|| { + match
PackageTarget::active_target() { + #[cfg(feature = "supported_targets")] + target::X86_64_DARWIN => target::X86_64_LINUX, + t => t, + } + }); + + demote::start(ui, + &self.bldr_url.to_string(), + (&self.pkg_ident, target), + &self.channel, + auth_token.as_str()).await + } +} diff --git a/components/hab/src/cli_v4/pkg/dependencies.rs b/components/hab/src/cli_v4/pkg/dependencies.rs new file mode 100644 index 0000000000..2a58ab8b53 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/dependencies.rs @@ -0,0 +1,52 @@ +// Implementation of `hab pkg dependencies` + +use clap_v4 as clap; + +use clap::{ArgAction, + Parser}; + +use habitat_common::cli::clap_validators::HabPkgIdentValueParser; + +use habitat_core::{fs::FS_ROOT_PATH, + package::PackageIdent}; + +use crate::{command::pkg::{dependencies, + DependencyRelation, + Scope}, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgDependenciesOptions { + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: PackageIdent, + + /// Show transitive dependencies + #[arg(name = "TRANSITIVE", short = 't', long = "transitive", action = ArgAction::SetTrue)] + transitive: bool, + + /// Show packages which are dependent on this one + #[arg(name = "REVERSE", short = 'r', long = "reverse", action = ArgAction::SetTrue)] + reverse: bool, +} + +impl PkgDependenciesOptions { + pub(super) fn do_dependencies(&self) -> HabResult<()> { + let scope = if self.transitive { + Scope::PackageAndDependencies + } else { + Scope::Package + }; + + let relation = if self.reverse { + DependencyRelation::Supports + } else { + DependencyRelation::Requires + }; + + dependencies::start(&self.pkg_ident, scope, relation, &*FS_ROOT_PATH).map_err(Into::into) + } +} diff --git a/components/hab/src/cli_v4/pkg/download.rs b/components/hab/src/cli_v4/pkg/download.rs new file mode 100644 index 0000000000..7fc2fc1da2 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/download.rs @@ -0,0 +1,180 @@ +// Implementation of `hab pkg download` command + +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::{ArgAction, + Parser}; + +use habitat_common::{cli::{clap_validators::{HabPkgIdentValueParser, + TomlOrPkgIdentFileValueParser}, + file_into_idents, + is_toml_file, + PACKAGE_TARGET_ENVVAR}, + ui::UI, + Error as HabitatCommonError}; + +use habitat_core::{package::{target, + PackageIdent, + PackageTarget}, + ChannelIdent}; + +use crate::{cli_v4::utils::{AuthToken, + BldrUrl}, + command::pkg::{download, + download::{PackageSet, + PackageSetFile}}, + error::Result as HabResult, + PRODUCT, + VERSION}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgDownloadOptions { + #[command(flatten)] + auth_token: AuthToken, + + #[command(flatten)] + bldr_url: BldrUrl, + + /// Download from the specified release channel.
Overridden if channel is specified in toml + /// file + #[arg(name = "CHANNEL", + short = 'c', + long = "channel", + default_value = "stable")] + channel: ChannelIdent, + + /// The path to store downloaded artifacts + #[arg(name = "DOWNLOAD_DIRECTORY", long = "download-directory")] + download_directory: Option<PathBuf>, + + /// File with newline separated package identifiers, or TOML file (ending with .toml extension) + #[arg(name = "PKG_IDENT_FILE", long = "file", num_args = 1..=10, value_parser = TomlOrPkgIdentFileValueParser)] + pkg_ident_file: Vec<String>, + + /// One or more Package Identifiers to download (eg. core/redis) + #[arg(name = "PKG_IDENT", num_args = 1.., value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: Vec<PackageIdent>, + + /// A package target (ex: x86_64-windows) (default: system appropriate target) + #[arg(name = "PKG_TARGET", env = PACKAGE_TARGET_ENVVAR, short = 't', long = "target")] + pkg_target: Option<PackageTarget>, + + /// Verify package integrity after download (Warning: this can be slow) + #[arg(name = "VERIFY", long = "verify", action = ArgAction::SetTrue)] + verify: bool, + + /// Ignore packages specified that are not present on the target Builder + #[arg(name = "IGNORE_MISSING_SEEDS", long = "ignore-missing-seeds", action = ArgAction::SetTrue)] + ignore_missing_seed: bool, +} + +impl PkgDownloadOptions { + pub(super) async fn do_download(&self, ui: &mut UI) -> HabResult<()> { + let auth_token = self.auth_token.try_from_cli_or_config(); + + let target = self.pkg_target.unwrap_or_else(|| { + match PackageTarget::active_target() { + #[cfg(feature = "supported_targets")] + target::X86_64_DARWIN => target::X86_64_LINUX, + t => t, + } + }); + + let mut package_sets = vec![]; + + if !self.pkg_ident.is_empty() { + package_sets.push(PackageSet { target, + channel: self.channel.clone(), + idents: self.pkg_ident.clone() }); + } + let mut package_sets_from_file = self.idents_from_file_matches(target)?; + package_sets.append(&mut package_sets_from_file); + package_sets.retain(|set| !set.idents.is_empty()); + + download::start(ui, + &self.bldr_url.to_string(), + PRODUCT, + VERSION, + &package_sets, + self.download_directory.as_ref(), + auth_token.as_ref().map(|x| x.as_str()), + self.verify, + self.ignore_missing_seed).await + } + + fn idents_from_file_matches(&self, target: PackageTarget) -> HabResult<Vec<PackageSet>> { + let mut sources: Vec<PackageSet> = Vec::new(); + + if !self.pkg_ident_file.is_empty() { + for f in &self.pkg_ident_file { + if is_toml_file(f) { + let file_data = std::fs::read_to_string(f)?; + let toml_data: PackageSetFile = + toml::from_str(&file_data).map_err(HabitatCommonError::TomlParser)?; + sources.append(&mut toml_data.to_package_sets()?); + } else { + let idents_from_file = file_into_idents(f)?; + let package_set = PackageSet { idents: idents_from_file, + channel: self.channel.clone(), + target }; + sources.push(package_set) + } + } + } + Ok(sources) + } +} + +#[cfg(test)] +mod tests { + use super::{PackageTarget, + Parser, + PkgDownloadOptions}; + use std::{collections::HashMap, + path::Path}; + + #[test] + fn test_package_sets_from_file_e2e_tests_toml() { + let mut toml_files_map = HashMap::<String, bool>::new(); + toml_files_map.insert("bad_header.toml".to_string(), false); + toml_files_map.insert("bad_ident.toml".to_string(), false); + toml_files_map.insert("bad_target.toml".to_string(), false); + toml_files_map.insert("no_header.toml".to_string(), false); + toml_files_map.insert("no_target.toml".to_string(), true); + toml_files_map.insert("happy_path.toml".to_string(), true); + + let tomls_dir =
std::env::var("CARGO_MANIFEST_DIR").unwrap(); + let tomls_dir = Path::new(&tomls_dir).join("../../test/end-to-end/fixtures/pkg_download/"); + assert!(tomls_dir.is_dir()); + + for toml in tomls_dir.read_dir().unwrap() { + if let Ok(toml) = toml { + let key = toml.file_name().into_string().unwrap(); + let path = toml.path().into_os_string().into_string(); + eprintln!("{}: {:#?}", key, path); + if let Ok(path) = path { + let args = ["download", "--file", &path]; + let result = PkgDownloadOptions::try_parse_from(args); + assert!(result.is_ok(), "{:#?}", result.err().unwrap()); + + let pkg_download = result.unwrap(); + let result = + pkg_download.idents_from_file_matches(PackageTarget::active_target()); + let should_be_ok = toml_files_map.get(&key).unwrap(); + assert_eq!(result.is_ok(), + *should_be_ok, + "{}: {:#?}", + key, + result.err()); + } + } + } + } +} diff --git a/components/hab/src/cli_v4/pkg/env.rs b/components/hab/src/cli_v4/pkg/env.rs new file mode 100644 index 0000000000..32f58ba245 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/env.rs @@ -0,0 +1,30 @@ +// Implementation of `hab pkg env` command + +use clap_v4 as clap; + +use clap::Parser; + +use habitat_common::cli::clap_validators::HabPkgIdentValueParser; + +use habitat_core::{fs::FS_ROOT_PATH, + package::PackageIdent}; + +use crate::command::pkg::env; + +use crate::error::Result as HabResult; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgEnvOptions { + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: PackageIdent, +} + +impl PkgEnvOptions { + pub(super) fn do_env(&self) -> HabResult<()> { + env::start(&self.pkg_ident, &*FS_ROOT_PATH).map_err(Into::into) + } +} diff --git a/components/hab/src/cli_v4/pkg/exec.rs b/components/hab/src/cli_v4/pkg/exec.rs new file mode 100644 index 0000000000..d8026d9759 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/exec.rs @@ -0,0 +1,43 @@ +// Implementation of `hab pkg exec` command + +use clap_v4 as clap; + +use std::{ffi::OsString, + path::PathBuf}; + +use clap::Parser; + +use habitat_common::cli::clap_validators::HabPkgIdentValueParser; + +use habitat_core::package::PackageIdent; + +use crate::{command::pkg::exec, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgExecOptions { + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: PackageIdent, + + /// The command to execute (ex: ls) + #[arg(name = "CMD")] + cmd: PathBuf, + + /// Arguments to be passed to the command + #[arg(name = "ARGS")] + args: Vec<String>, +} + +impl PkgExecOptions { + pub(super) fn do_exec(&self) -> HabResult<()> { + // Required to convert to OsStr + // TODO: This should be internal implementation detail later on and move to actual command + // implementation when `v2` is removed + let args = self.args.iter().map(Into::into).collect::<Vec<OsString>>(); + exec::start(&self.pkg_ident, &self.cmd, &args) + } +} diff --git
a/components/hab/src/cli_v4/pkg/export.rs b/components/hab/src/cli_v4/pkg/export.rs new file mode 100644 index 0000000000..2f99e042ab --- /dev/null +++ b/components/hab/src/cli_v4/pkg/export.rs @@ -0,0 +1,72 @@ +// Implementation of `hab pkg export` command + +use std::ffi::OsString; + +use clap_v4 as clap; + +use clap::{Args, + Subcommand}; + +use habitat_common::ui::{UIWriter, + UI}; + +use crate::{command::pkg::export, + error::Result as HabResult}; + +#[derive(Debug, Clone, Args)] +pub(crate) struct PkgExportCommandOptions { + /// Arguments to be passed to the command + #[arg(name = "ARGS")] + args: Vec<String>, +} + +#[derive(Debug, Clone, Subcommand)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) enum PkgExportCommand { + /// Container Exporter + #[cfg(any(target_os = "linux", target_os = "windows"))] + Container(PkgExportCommandOptions), + + #[cfg(any(target_os = "linux", target_os = "windows"))] + #[command(hide = true)] + Docker(PkgExportCommandOptions), + + /// Tar Exporter + #[cfg(any(target_os = "linux", target_os = "windows"))] + Tar(PkgExportCommandOptions), +} + +impl PkgExportCommand { + pub(super) async fn do_export(&self, ui: &mut UI) -> HabResult<()> { + match self { + #[cfg(any(target_os = "linux", target_os = "windows"))] + PkgExportCommand::Container(opts) => { + export::container::start(ui, + &opts.args + .iter() + .map(|s| OsString::from(s)) + .collect::<Vec<OsString>>()).await + } + #[cfg(any(target_os = "linux", target_os = "windows"))] + PkgExportCommand::Docker(opts) => { + ui.warn("'hab pkg export docker' is now a deprecated alias for 'hab pkg export \ + container'. Please update your automation and processes accordingly.")?; + export::container::start(ui, + &opts.args + .iter() + .map(|s| OsString::from(s)) + .collect::<Vec<OsString>>()).await + } + #[cfg(any(target_os = "linux", target_os = "windows"))] + PkgExportCommand::Tar(opts) => { + export::tar::start(ui, + &opts.args + .iter() + .map(|s| OsString::from(s)) + .collect::<Vec<OsString>>()).await + } + } + } +} diff --git a/components/hab/src/cli_v4/pkg/hash.rs b/components/hab/src/cli_v4/pkg/hash.rs new file mode 100644 index 0000000000..c243919402 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/hash.rs @@ -0,0 +1,44 @@ +// Implementation of `hab pkg hash` command + +use clap_v4 as clap; + +use std::io::BufRead; + +use clap::Parser; + +use habitat_common::cli::clap_validators::FileExistsValueParser; + +use crate::command::pkg::hash; + +use crate::error::Result as HabResult; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgHashOptions { + /// Filepath to the Habitat Package file + #[arg(name = "SOURCE", value_parser = FileExistsValueParser)] + source: Option<String>, /* TODO: Convert it to more semantic `PathBuf`, when we get rid of + * `clap-v2` functionality, revisit `command::pkg::hash` */ +} + +impl PkgHashOptions { + pub(super) fn do_hash(&self) -> HabResult<()> { + match &self.source { + Some(source) => { + // hash single file + hash::start(source.as_str()) + } + None => { + // read files from stdin + let stdin = std::io::stdin(); + for line in stdin.lock().lines() { + let file = line?; + hash::start(file.trim_end())?; + } + Ok(()) + } + } + } +} diff --git a/components/hab/src/cli_v4/pkg/header.rs b/components/hab/src/cli_v4/pkg/header.rs new file mode 100644 index
0000000000..4e18f5d4d5 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/header.rs @@ -0,0 +1,32 @@ +// Implementation of `hab pkg header` command +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::Parser; + +use habitat_core::crypto; + +use habitat_common::{cli::clap_validators::FileExistsValueParser, + ui::UI}; + +use crate::{command::pkg::header, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgHeaderOptions { + /// A path to a Habitat Artifact (ex: /home/acme-redis-3.0.7-21120102031201-x86_64-linux.hart) + #[arg(name = "SOURCE", value_parser = FileExistsValueParser)] + source: String, +} + +impl PkgHeaderOptions { + pub(super) fn do_header(&self, ui: &mut UI) -> HabResult<()> { + crypto::init()?; + + header::start(ui, &PathBuf::from(&self.source)) + } +} diff --git a/components/hab/src/cli_v4/pkg/info.rs b/components/hab/src/cli_v4/pkg/info.rs new file mode 100644 index 0000000000..80467ad8bb --- /dev/null +++ b/components/hab/src/cli_v4/pkg/info.rs @@ -0,0 +1,41 @@ +// Implementation of `hab pkg info` command +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::{ArgAction, + Parser}; + +use habitat_core::crypto; + +use habitat_common::{cli::clap_validators::FileExistsValueParser, + ui::UI}; + +use crate::{command::pkg::info, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgInfoOptions { + /// Output will be rendered in json. (Includes extended metadata) + #[arg(name = "TO_JSON", + short = 'j', + long = "json", + action = ArgAction::SetTrue)] + json: bool, + + // TODO: Move to semantic PathBuf after CLAP-v2 support is removed kept due to Clap V2 quirk + /// A path to a Habitat Artifact (ex: /home/acme-redis-3.0.7-21120102031201-x86_64-linux.hart) + #[arg(name = "SOURCE", value_parser = FileExistsValueParser)] + source: String, +} + +impl PkgInfoOptions { + pub(super) fn do_info(&self, ui: &mut UI) -> HabResult<()> { + crypto::init()?; + + info::start(ui, &Into::<PathBuf>::into(self.source.clone()), self.json) + } +} diff --git a/components/hab/src/cli_v4/pkg/install.rs b/components/hab/src/cli_v4/pkg/install.rs new file mode 100644 index 0000000000..d9ae373c4c --- /dev/null +++ b/components/hab/src/cli_v4/pkg/install.rs @@ -0,0 +1,153 @@ +// Implementation of `hab pkg install` command + +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::{builder::NonEmptyStringValueParser, + parser::ValueSource, + ArgAction, + CommandFactory, + Parser}; + +use habitat_core::{env::Config, + fs::{cache_artifact_path, + FS_ROOT_PATH}, + ChannelIdent}; + +use habitat_common::{cli::{BINLINK_DIR_ENVVAR, + DEFAULT_BINLINK_DIR}, + command::package::install::{self, + InstallHookMode, + InstallMode, + InstallSource, + LocalPackageUsage}, + ui::UI, + FeatureFlag, + FEATURE_FLAGS}; + +use crate::{command::pkg::binlink, + error::Result as HabResult, + PRODUCT, + VERSION}; + +use crate::cli_v4::utils::{AuthToken, + BldrUrl}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + rename_all = "screaming_snake", + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgInstallOptions {
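+ // A minimal usage sketch (hypothetical idents/paths), assuming this parser is + // reached via `hab pkg install ...` through `cli_driver`: + // + //   hab pkg install core/redis + //   hab pkg install -b -c stable ./results/acme-redis-3.0.7-21120102031201-x86_64-linux.hart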
+ #[command(flatten)] + bldr_url: BldrUrl, + + /// Install from the specified release channel + #[arg(short = 'c', + long = "channel", + default_value = "stable", + env = ChannelIdent::ENVVAR)] + channel: ChannelIdent, + + /// One or more Habitat package identifiers (ex: acme/redis) and/or filepaths to a Habitat + /// Artifact (ex: /home/acme-redis-3.0.7-21120102031201-x86_64-linux.hart) + #[arg(required = true)] + pkg_ident_or_artifact: Vec<InstallSource>, + + /// Binlink all binaries from installed package(s) into BINLINK_DIR + #[arg(short = 'b', long = "binlink")] + binlink: bool, + + /// Binlink all binaries from installed package(s) into BINLINK_DIR instead of the default + #[arg(long = "binlink-dir", + default_value = DEFAULT_BINLINK_DIR, + env = BINLINK_DIR_ENVVAR, value_parser = NonEmptyStringValueParser::new())] + binlink_dir: String, + + /// Overwrite existing binlinks + #[arg(short = 'f', long = "force", action = ArgAction::SetTrue)] + force: bool, + + #[command(flatten)] + auth_token: AuthToken, + + /// Do not run any install hooks + #[arg(long = "ignore-install-hook", action = ArgAction::SetTrue)] + ignore_install_hook: bool, + + /// Install packages in offline mode + #[arg(long = "offline", + action = ArgAction::SetTrue, + hide = !FEATURE_FLAGS.contains(FeatureFlag::OFFLINE_INSTALL))] + offline: bool, + + /// Do not use locally-installed packages when a corresponding package cannot be installed + /// from Builder + #[arg(long = "ignore-local", + action = ArgAction::SetTrue)] + ignore_local: bool, +} + +impl PkgInstallOptions { + pub(crate) async fn do_install(&self, + ui: &mut UI, + feature_flags: FeatureFlag) + -> HabResult<()> { + let pkg_install_args: Vec<_> = std::env::args_os().skip(2).collect(); + + let auth_token = self.auth_token.try_from_cli_or_config(); + + let install_mode = if feature_flags.contains(FeatureFlag::OFFLINE_INSTALL) && self.offline { + InstallMode::Offline + } else { + InstallMode::default() + }; + + let local_package_usage = if self.ignore_local { + LocalPackageUsage::Ignore + } else { + LocalPackageUsage::default() + }; + + let install_hook_mode = if self.ignore_install_hook { + InstallHookMode::Ignore + } else { + InstallHookMode::default() + }; + + let matches = Self::command().get_matches_from(pkg_install_args); + let do_binlink = match matches.value_source("binlink_dir") { + Some(ValueSource::CommandLine) => true, + _ => self.binlink, + }; + + for install_source in &self.pkg_ident_or_artifact { + let pkg_install = install::start(ui, + &self.bldr_url.to_string(), + &self.channel, + &install_source, + PRODUCT, + VERSION, + &FS_ROOT_PATH, + &cache_artifact_path(Some(FS_ROOT_PATH.as_path())), + auth_token.as_deref(), + &install_mode, + &local_package_usage, + install_hook_mode).await?; + + if do_binlink { + let binlink_dir = PathBuf::from(&self.binlink_dir); + binlink::binlink_all_in_pkg(ui, + pkg_install.ident(), + &binlink_dir, + &FS_ROOT_PATH, + self.force)?; + } + } + + Ok(()) + } +} diff --git a/components/hab/src/cli_v4/pkg/list.rs b/components/hab/src/cli_v4/pkg/list.rs new file mode 100644 index 0000000000..9f8faac002 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/list.rs @@ -0,0 +1,51 @@ +// Implementation of `hab pkg list` + +use clap_v4 as clap; + +use clap::Parser; + +use habitat_core::package::PackageIdent; + +use habitat_common::cli::clap_validators::HabOriginValueParser; + +use crate::{command::pkg::{list, + list::ListingType}, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)]
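+// The `#[group(required = true, multiple = false)]` attribute below makes ALL, +// ORIGIN and PKG_IDENT mutually exclusive, with exactly one required, e.g.: +// +//   hab pkg list --all +//   hab pkg list --origin core +//   hab pkg list core/redis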
+#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +#[group(required = true, multiple = false)] +pub(crate) struct PkgListOptions { + /// List all installed packages + #[arg(name = "ALL", short = 'a', long = "all")] + all: bool, + + // TODO : Validations + /// An origin to list + #[arg(name = "ORIGIN", short = 'o', long = "origin", value_parser = HabOriginValueParser)] + origin: Option<String>, + + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT")] + pkg_ident: Option<PackageIdent>, +} + +impl PkgListOptions { + pub(super) fn do_list(&self) -> HabResult<()> { list::start(&self.into()) } +} + +impl From<&PkgListOptions> for ListingType { + fn from(opts: &PkgListOptions) -> Self { + if opts.all { + ListingType::AllPackages + } else if let Some(origin) = &opts.origin { + ListingType::Origin(origin.clone()) + } else if let Some(ident) = &opts.pkg_ident { + ListingType::Ident(ident.clone()) + } else { + unreachable!(); + } + } +} diff --git a/components/hab/src/cli_v4/pkg/path.rs b/components/hab/src/cli_v4/pkg/path.rs new file mode 100644 index 0000000000..66b5b17f35 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/path.rs @@ -0,0 +1,27 @@ +// Implementation of `hab pkg path` + +use clap_v4 as clap; + +use clap::Parser; + +use habitat_common::cli::clap_validators::HabPkgIdentValueParser; + +use habitat_core::{fs::FS_ROOT_PATH, + package::PackageIdent}; + +use crate::{command::pkg::path, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgPathOptions { + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: PackageIdent, +} + +impl PkgPathOptions { + pub(super) fn do_path(&self) -> HabResult<()> { path::start(&self.pkg_ident, &FS_ROOT_PATH) } +} diff --git a/components/hab/src/cli_v4/pkg/promote.rs b/components/hab/src/cli_v4/pkg/promote.rs new file mode 100644 index 0000000000..d58e3590d7 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/promote.rs @@ -0,0 +1,63 @@ +// Implementation of `hab pkg promote` command + +use clap_v4 as clap; + +use clap::Parser; + +use habitat_common::{cli::{clap_validators::HabPkgIdentValueParser, + PACKAGE_TARGET_ENVVAR}, + ui::UI}; + +use habitat_core::{package::{target, + PackageIdent, + PackageTarget}, + ChannelIdent}; + +use crate::{cli_v4::utils::{AuthToken, + BldrUrl}, + command::pkg::promote, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgPromoteOptions { + #[command(flatten)] + bldr_url: BldrUrl, + + /// A fully qualified package identifier (ex: core/busybox-static/1.42.2/20170513215502) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::full())] + pkg_ident: PackageIdent, + + /// Promote to the specified release channel + #[arg(name = "CHANNEL")] + channel: ChannelIdent, + + /// A package target (ex: x86_64-windows) (default: system appropriate target) + #[arg(name = "PKG_TARGET", env = PACKAGE_TARGET_ENVVAR)] + pkg_target: Option<PackageTarget>, + + #[command(flatten)] + auth_token: AuthToken, +} + +impl PkgPromoteOptions {
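+ // Example invocation (hypothetical fully qualified ident): + // + //   hab pkg promote core/redis/4.0.14/20200421191514 stable x86_64-linux +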
pub(super) async fn do_promote(&self, ui: &mut UI) -> HabResult<()> { + let auth_token = self.auth_token.from_cli_or_config()?; + + let target = self.pkg_target.unwrap_or_else(|| { + match PackageTarget::active_target() { + #[cfg(feature = "supported_targets")] + target::X86_64_DARWIN => target::X86_64_LINUX, + t => t, + } + }); + + promote::start(ui, + &self.bldr_url.to_string(), + (&self.pkg_ident, target), + &self.channel, + auth_token.as_str()).await + } +} diff --git a/components/hab/src/cli_v4/pkg/provides.rs b/components/hab/src/cli_v4/pkg/provides.rs new file mode 100644 index 0000000000..ecb04cd270 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/provides.rs @@ -0,0 +1,38 @@ +// Implementation of `hab pkg provides` command + +use clap_v4 as clap; + +use clap::{ArgAction, + Parser}; + +use habitat_core::fs::FS_ROOT_PATH; + +use crate::{command::pkg::provides, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgProvidesOptions { + /// File name to find + #[arg(name = "FILE")] + file: String, + + /// Show fully qualified package names (ex: core/busybox-static/1.24.2/20160708162350) + #[arg(name = "FULL_RELEASES", short = 'r', action = ArgAction::SetTrue)] + full_releases: bool, + + /// Show full path to file + #[arg(name = "FULL_PATHS", short = 'p', action = ArgAction::SetTrue)] + full_paths: bool, +} + +impl PkgProvidesOptions { + pub(super) fn do_provides(&self) -> HabResult<()> { + provides::start(&self.file, + &FS_ROOT_PATH, + self.full_releases, + self.full_paths) + } +} diff --git a/components/hab/src/cli_v4/pkg/search.rs b/components/hab/src/cli_v4/pkg/search.rs new file mode 100644 index 0000000000..b3e6045679 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/search.rs @@ -0,0 +1,41 @@ +// Implementation of `hab pkg search` command + +use clap_v4 as clap; + +use clap::Parser; + +use crate::{cli_v4::utils::{AuthToken, + BldrUrl}, + command::pkg::search, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgSearchOptions { + /// Search term + #[arg(name = "SEARCH_TERM")] + search_term: String, + + #[command(flatten)] + bldr_url: BldrUrl, + + #[command(flatten)] + auth_token: AuthToken, + + /// Limit how many packages to retrieve + #[arg(name = "LIMIT", short = 'l', long = "limit", default_value_t = 50)] + limit: usize, +} + +impl PkgSearchOptions { + pub(crate) async fn do_search(&self) -> HabResult<()> { + let auth_token = self.auth_token.try_from_cli_or_config(); + + search::start(&self.search_term, + &self.bldr_url.to_string(), + self.limit, + auth_token.as_deref()).await + } +} diff --git a/components/hab/src/cli_v4/pkg/sign.rs b/components/hab/src/cli_v4/pkg/sign.rs new file mode 100644 index 0000000000..6365303eed --- /dev/null +++ b/components/hab/src/cli_v4/pkg/sign.rs @@ -0,0 +1,65 @@ +// Implementation of `hab pkg sign` command + +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::Parser; + +use habitat_core::{crypto, + crypto::keys::KeyCache, + origin::Origin}; + +use habitat_common::{cli::clap_validators::{FileExistsValueParser, + HabOriginValueParser}, + cli_config::CliConfig, + ui::UI}; + +use crate::{cli_v4::utils::CacheKeyPath, + command::pkg::sign, + error::{Error as 
HabError, + Result as HabResult}}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgSignOptions { + /// Origin key used to create signature + #[arg(name = "ORIGIN", long = "origin", env=crate::ORIGIN_ENVVAR, value_parser = HabOriginValueParser)] + origin: Option<Origin>, + + // TODO: Move to semantic PathBuf after CLAP-v2 support is removed kept due to Clap V2 quirk + /// A path to a source archive file (ex: /home/acme-redis-3.0.7-21120102031201.tar.xz) + #[arg(name = "SOURCE", value_parser = FileExistsValueParser)] + source: String, + + /// The destination path to the signed Habitat Artifact (ex: + /// /home/acme-redis-3.0.7-21120102031201-x86_64-linux.hart) + #[arg(name = "DEST")] + dest: PathBuf, + + #[command(flatten)] + cache_key_path: CacheKeyPath, +} + +impl PkgSignOptions { + pub(crate) fn do_sign(&self, ui: &mut UI) -> HabResult<()> { + let origin = match &self.origin { + Some(origin) => origin.clone(), + None => { + CliConfig::load()?.origin.ok_or_else(|| { + HabError::CryptoCLI("No origin specified".to_string()) + })? + } + }; + + crypto::init()?; + let key_cache = KeyCache::new::<PathBuf>((&self.cache_key_path).into()); + let key = key_cache.latest_secret_origin_signing_key(&origin)?; + sign::start(ui, + &key, + &Into::<PathBuf>::into(self.source.clone()), + &self.dest) + } +} diff --git a/components/hab/src/cli_v4/pkg/uninstall.rs b/components/hab/src/cli_v4/pkg/uninstall.rs new file mode 100644 index 0000000000..1aef323a22 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/uninstall.rs @@ -0,0 +1,81 @@ +// Implementation of `hab pkg uninstall` command +use clap_v4 as clap; + +use clap::{ArgAction, + Parser}; + +use habitat_core::{fs::FS_ROOT_PATH, + package::PackageIdent}; + +use habitat_common::{cli::clap_validators::HabPkgIdentValueParser, + ui::UI}; + +use crate::{command::pkg::{uninstall, + uninstall::UninstallHookMode, + ExecutionStrategy, + Scope}, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgUninstallOptions { + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: PackageIdent, + + /// Just show what would be uninstalled, don't actually do it + #[arg(name = "DRYRUN", short = 'd', long = "dryrun", action = ArgAction::SetTrue)] + dryrun: bool, + + /// Only keep this number of latest packages, uninstalling all others. + #[arg(name = "KEEP_LATEST", long = "keep-latest")] + keep_latest: Option<usize>, + + /// Identifier of one or more packages that should not be uninstalled.
(ex: core/redis, + /// core/busybox-static/1.42.2/21120102031201) + #[arg(name = "EXCLUDE", long = "exclude")] + exclude: Vec<PackageIdent>, + + /// Don't uninstall dependencies + #[arg(name = "NO_DEPS", long = "no-deps")] + no_deps: bool, + + /// Do not run any uninstall hooks + #[arg(name = "IGNORE_UNINSTALL_HOOK", long = "ignore-uninstall-hook")] + ignore_uninstall_hook: bool, +} + +impl PkgUninstallOptions { + pub(crate) async fn do_uninstall(&self, ui: &mut UI) -> HabResult<()> { + let exec_strategy = if self.dryrun { + ExecutionStrategy::DryRun + } else { + ExecutionStrategy::Run + }; + + let uninstall_mode = self.keep_latest.into(); + + let scope = if self.no_deps { + Scope::Package + } else { + Scope::PackageAndDependencies + }; + + let uninstall_hook_mode = if self.ignore_uninstall_hook { + UninstallHookMode::Ignore + } else { + UninstallHookMode::default() + }; + + uninstall::start(ui, + &self.pkg_ident, + &FS_ROOT_PATH, + exec_strategy, + uninstall_mode, + scope, + &self.exclude, + uninstall_hook_mode).await + } +} diff --git a/components/hab/src/cli_v4/pkg/upload.rs b/components/hab/src/cli_v4/pkg/upload.rs new file mode 100644 index 0000000000..f22b134d2a --- /dev/null +++ b/components/hab/src/cli_v4/pkg/upload.rs @@ -0,0 +1,83 @@ +// Implementation of `hab pkg upload` command + +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::{ArgAction, + Parser}; + +use habitat_common::{cli::clap_validators::FileExistsValueParser, + ui::UI}; + +use habitat_core::{crypto::keys::KeyCache, + ChannelIdent}; + +use habitat_api_client::BuildOnUpload; + +use crate::{cli_v4::utils::{AuthToken, + BldrUrl, + CacheKeyPath}, + command::pkg::upload, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgUploadOptions { + #[command(flatten)] + bldr_url: BldrUrl, + + #[command(flatten)] + auth_token: AuthToken, + + /// Optional additional release channel to upload package to. Packages are always uploaded + /// to `unstable`, regardless of the value of this option + #[arg(name = "CHANNEL", short = 'c', long = "channel")] + channel: Option<ChannelIdent>, + + /// Skips checking availability of package and force uploads, potentially overwriting a + /// stored copy of a package.
(default: false) + #[arg(name = "FORCE", long = "force", action = ArgAction::SetTrue)] + force: bool, + + /// Disable auto-build for all packages in this upload + #[arg(name = "NO_BUILD", long = "no-build", action = ArgAction::SetTrue)] + no_build: bool, + + // TODO: Move to semantic PathBuf after CLAP-v2 support is removed kept due to Clap V2 quirk + /// One or more filepaths to a Habitat Artifact (ex: + /// /home/acme-redis-3.0.7-21120102031201-x86_64-linux.hart) + #[arg(name = "HART_FILE", required = true, value_parser = FileExistsValueParser)] + hart_file: Vec<String>, + + #[command(flatten)] + cache_key_path: CacheKeyPath, +} + +impl PkgUploadOptions { + pub(crate) async fn do_upload(&self, ui: &mut UI) -> HabResult<()> { + let auth_token = self.auth_token.from_cli_or_config()?; + + let auto_build = if self.no_build { + BuildOnUpload::Disable + } else { + BuildOnUpload::PackageDefault + }; + + let key_cache = KeyCache::new::<PathBuf>((&self.cache_key_path).into()); + + for hart_file in &self.hart_file { + upload::start(ui, + &self.bldr_url.to_string(), + &self.channel, + &auth_token, + &Into::<PathBuf>::into(hart_file.clone()), + self.force, + auto_build, + &key_cache).await?; + } + Ok(()) + } +} diff --git a/components/hab/src/cli_v4/pkg/verify.rs b/components/hab/src/cli_v4/pkg/verify.rs new file mode 100644 index 0000000000..9a8d24816e --- /dev/null +++ b/components/hab/src/cli_v4/pkg/verify.rs @@ -0,0 +1,40 @@ +// Implementation of `hab pkg verify` command + +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::Parser; + +use habitat_core::{crypto, + crypto::keys::KeyCache}; + +use habitat_common::{cli::clap_validators::FileExistsValueParser, + ui::UI}; + +use crate::{cli_v4::utils::CacheKeyPath, + command::pkg::verify, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgVerifyOptions { + // TODO: Move to semantic PathBuf once Clap-v2 is removed + /// A path to a Habitat Artifact (ex: /home/acme-redis-3.0.7-21120102031201-x86_64-linux.hart) + #[arg(name = "SOURCE", value_parser = FileExistsValueParser)] + source: String, + + #[command(flatten)] + cache_key_path: CacheKeyPath, +} + +impl PkgVerifyOptions { + pub(super) fn do_verify(&self, ui: &mut UI) -> HabResult<()> { + crypto::init()?; + let key_cache = KeyCache::new::<PathBuf>((&self.cache_key_path).into()); + + verify::start(ui, &Into::<PathBuf>::into(self.source.clone()), &key_cache) + } +} diff --git a/components/hab/src/cli_v4/utils.rs b/components/hab/src/cli_v4/utils.rs new file mode 100644 index 0000000000..7db5e0aeff --- /dev/null +++ b/components/hab/src/cli_v4/utils.rs @@ -0,0 +1,306 @@ +// Utilities that are used by v4 macros +// +// Note we are duplicating this functionality because trivially using +// `cfg_attr(feature = "v4"),...]` is not easy to make work with existing code. Eventually this +// will be the only `util` left (hope so) + +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::Parser; +use lazy_static::lazy_static; +use url::Url; + +use habitat_common::cli_config::CliConfig; + +use habitat_core::{crypto::CACHE_KEY_PATH_ENV_VAR, + env as hcore_env, + fs::CACHE_KEY_PATH, + url::{BLDR_URL_ENVVAR, + DEFAULT_BLDR_URL}, + AUTH_TOKEN_ENVVAR}; + +use crate::error::{Error as HabError, + Result as HabResult}; + +lazy_static!
diff --git a/components/hab/src/cli_v4/utils.rs b/components/hab/src/cli_v4/utils.rs
new file mode 100644
index 0000000000..7db5e0aeff
--- /dev/null
+++ b/components/hab/src/cli_v4/utils.rs
@@ -0,0 +1,306 @@
+// Utilities that are used by v4 macros
+//
+// Note we are duplicating this functionality because trivially using
+// `cfg_attr(feature = "v4", ...)` is not easy to make work with existing code. Eventually this
+// will be the only `util` left (hope so)
+
+use clap_v4 as clap;
+
+use std::path::PathBuf;
+
+use clap::Parser;
+use lazy_static::lazy_static;
+use url::Url;
+
+use habitat_common::cli_config::CliConfig;
+
+use habitat_core::{crypto::CACHE_KEY_PATH_ENV_VAR,
+                   env as hcore_env,
+                   fs::CACHE_KEY_PATH,
+                   url::{BLDR_URL_ENVVAR,
+                         DEFAULT_BLDR_URL},
+                   AUTH_TOKEN_ENVVAR};
+
+use crate::error::{Error as HabError,
+                   Result as HabResult};
+
+lazy_static! {
+    pub(crate) static ref CACHE_KEY_PATH_DEFAULT: String =
+        CACHE_KEY_PATH.to_string_lossy().to_string();
+}
+
+#[derive(Debug, Clone, Parser)]
+pub(crate) struct CacheKeyPath {
+    /// Cache for creating and searching for encryption keys
+    #[arg(long = "cache-key-path",
+          env = CACHE_KEY_PATH_ENV_VAR,
+          default_value = &*CACHE_KEY_PATH_DEFAULT)]
+    pub(crate) cache_key_path: PathBuf,
+}
+
+impl From<PathBuf> for CacheKeyPath {
+    fn from(cache_key_path: PathBuf) -> Self { Self { cache_key_path } }
+}
+
+impl From<&CacheKeyPath> for PathBuf {
+    fn from(cache_key_path: &CacheKeyPath) -> PathBuf { cache_key_path.cache_key_path.clone() }
+}
+
+#[derive(Debug, Clone, Parser)]
+pub(crate) struct BldrUrl {
+    // TODO:agadgil: Use the Url Validator
+    /// Specify an alternate Builder endpoint.
+    #[arg(name = "BLDR_URL", short = 'u', long = "url")]
+    bldr_url: Option<Url>,
+}
+
+impl BldrUrl {
+    //
+    pub(crate) fn to_string(&self) -> String {
+        if let Some(url) = &self.bldr_url {
+            url.to_string()
+        } else {
+            match hcore_env::var(BLDR_URL_ENVVAR) {
+                Ok(v) => v,
+                Err(_) => {
+                    // Okay to unwrap: `CliConfig::load` never returns `Err` here.
+                    match CliConfig::load().unwrap().bldr_url {
+                        Some(v) => v,
+                        None => DEFAULT_BLDR_URL.to_string(),
+                    }
+                }
+            }
+        }
+    }
+}
+
+#[derive(Debug, Clone, Parser)]
+pub(crate) struct AuthToken {
+    // TODO: Add Validator for this?
+    /// Authentication token for Builder.
+    #[arg(name = "AUTH_TOKEN", short = 'z', long = "auth")]
+    auth_token: Option<String>,
+}
+
+impl AuthToken {
+    // This function returns a result. Use this when `auth_token` is required, whether it comes
+    // from a command line option, the environment, or config.
+    pub(crate) fn from_cli_or_config(&self) -> HabResult<String> {
+        if let Some(auth_token) = &self.auth_token {
+            Ok(auth_token.clone())
+        } else {
+            match hcore_env::var(AUTH_TOKEN_ENVVAR) {
+                Ok(v) => Ok(v),
+                Err(_) => {
+                    CliConfig::load()?.auth_token.ok_or_else(|| {
+                        HabError::ArgumentError("No auth token \
+                                                 specified"
+                                                          .into())
+                    })
+                }
+            }
+        }
+    }
+
+    // This function returns an `Option<String>`: if reading from config fails, or the environment
+    // variable is not set, it simply returns `None`.
+    pub(crate) fn try_from_cli_or_config(&self) -> Option<String> {
+        match self.from_cli_or_config() {
+            Ok(v) => Some(v),
+            Err(_) => None,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    mod auth_token {
+
+        use crate::cli_v4::utils::AuthToken;
+
+        use clap_v4 as clap;
+
+        use clap::Parser;
+
+        habitat_core::locked_env_var!(HAB_AUTH_TOKEN, locked_auth_token);
+
+        #[derive(Debug, Clone, Parser)]
+        struct TestAuthToken {
+            #[command(flatten)]
+            a: AuthToken,
+        }
+
+        #[test]
+        fn required_env_no_cli_success() {
+            let env_var = locked_auth_token();
+            env_var.set("env-auth-token");
+
+            let args = ["test-auth-token"];
+            let result = TestAuthToken::try_parse_from(args);
+            assert!(result.is_ok(), "{:?}", result.err().unwrap());
+
+            let test_auth_token = result.unwrap();
+            let auth_token = test_auth_token.a.from_cli_or_config();
+            assert!(auth_token.is_ok(), "{:#?}", auth_token.err().unwrap());
+        }
+
+        #[test]
+        fn required_no_env_cli_success() {
+            let env_var = locked_auth_token();
+            env_var.unset();
+
+            let args = ["test-auth-token", "--auth", "foo-bar"];
+            let result = TestAuthToken::try_parse_from(args);
+            assert!(result.is_ok(), "{:?}", result.err().unwrap());
+        }
+
+        #[test]
+        fn required_no_env_no_cli_error() {
+            let env_var = locked_auth_token();
+            env_var.unset();
+
+            let args = ["test-auth-token"];
+            let result = TestAuthToken::try_parse_from(args);
+            assert!(result.is_ok(), "{:?}", result.err().unwrap());
+
+            let test_auth_token = result.unwrap();
+            let auth_token = test_auth_token.a.from_cli_or_config();
+            assert!(auth_token.is_err(), "{:#?}", auth_token.ok().unwrap());
+        }
+
+        #[test]
+        fn required_empty_env_no_cli_error() {
+            let env_var = locked_auth_token();
+            env_var.set("");
+
+            let args = ["test-auth-token"];
+            let result = TestAuthToken::try_parse_from(args);
+            assert!(result.is_ok(), "{:?}", result.err().unwrap());
+
+            let test_auth_token = result.unwrap();
+            let auth_token = test_auth_token.a.from_cli_or_config();
+            assert!(auth_token.is_err(), "{:#?}", auth_token.ok().unwrap());
+        }
+
+        #[test]
+        fn optional_empty_env_no_cli_none() {
+            let env_var = locked_auth_token();
+            env_var.set("");
+
+            let args = ["test-auth-token"];
+            let result = TestAuthToken::try_parse_from(args);
+            assert!(result.is_ok(), "{:?}", result.err().unwrap());
+
+            let test_auth_token = result.unwrap();
+            let auth_token = test_auth_token.a.try_from_cli_or_config();
+            assert!(auth_token.is_none(), "{:#?}", auth_token.unwrap());
+        }
+
+        #[test]
+        fn tok_optional_from_env_no_cli_some() {
+            let env_var = locked_auth_token();
+            env_var.set("env-auth-token");
+
+            let args = ["test-auth-token"];
+            let result = TestAuthToken::try_parse_from(args);
+            assert!(result.is_ok(), "{:?}", result.err().unwrap());
+
+            let test_auth_token = result.unwrap();
+            let auth_token = test_auth_token.a.try_from_cli_or_config();
+            assert_eq!(Some("env-auth-token".to_string()),
+                       auth_token,
+                       "{:#?}",
+                       auth_token);
+        }
+
+        #[test]
+        fn optional_no_env_from_cli_some() {
+            let env_var = locked_auth_token();
+            env_var.set("env-auth-token");
+
+            let args = ["test-auth-token", "--auth", "foo-bar"];
+            let result = TestAuthToken::try_parse_from(args);
+            assert!(result.is_ok(), "{:?}", result.err().unwrap());
+
+            let test_auth_token = result.unwrap();
+            let auth_token = test_auth_token.a.try_from_cli_or_config();
+            assert_eq!(Some("foo-bar".to_string()), auth_token, "{:#?}", auth_token);
+        }
+    }
+
+    mod bldr_url {
+
+        use crate::cli_v4::utils::{BldrUrl,
+                                   DEFAULT_BLDR_URL};
+
+        use clap_v4 as clap;
+
+        use clap::Parser;
+
+
habitat_core::locked_env_var!(HAB_BLDR_URL, locked_bldr_url); + + #[derive(Debug, Clone, Parser)] + struct TestBldrUrl { + #[command(flatten)] + u: BldrUrl, + } + + #[test] + fn no_env_no_cli_default() { + let env_var = locked_bldr_url(); + env_var.unset(); + + let args = ["test-bldr-url"]; + let result = TestBldrUrl::try_parse_from(args); + assert!(result.is_ok(), "{:#?}", result.err().unwrap()); + + let test_bldr_url = result.unwrap(); + let bldr_url = test_bldr_url.u.to_string(); + assert_eq!(bldr_url.as_str(), DEFAULT_BLDR_URL, "{:#?}", bldr_url); + } + + #[test] + fn empty_env_no_cli_default() { + let env_var = locked_bldr_url(); + env_var.set(""); + + let args = ["test-bldr-url"]; + let result = TestBldrUrl::try_parse_from(args); + assert!(result.is_ok(), "{:#?}", result.err().unwrap()); + + let test_bldr_url = result.unwrap(); + let bldr_url = test_bldr_url.u.to_string(); + assert_eq!(bldr_url.as_str(), DEFAULT_BLDR_URL, "{:#?}", bldr_url); + } + + #[test] + fn env_cli_passed_value() { + let test_bldr_url_val = "https://test.bldr.habitat.sh/"; + let cli_bldr_url_val = "https://cli.bldr.habitat.sh/"; + let env_var = locked_bldr_url(); + env_var.set(test_bldr_url_val); + + let args = ["test-bldr-url", "--url", cli_bldr_url_val]; + let result = TestBldrUrl::try_parse_from(args); + assert!(result.is_ok(), "{:#?}", result.err().unwrap()); + + let test_bldr_url = result.unwrap(); + let bldr_url = test_bldr_url.u.to_string(); + assert_eq!(bldr_url.as_str(), cli_bldr_url_val, "{:#?}", bldr_url); + } + + #[test] + fn env_no_cli_env_value() { + let test_bldr_url_val = "https://test.bldr.habitat.sh/"; + let env_var = locked_bldr_url(); + env_var.set(test_bldr_url_val); + + let args = ["test-bldr-url"]; + let result = TestBldrUrl::try_parse_from(args); + assert!(result.is_ok(), "{:#?}", result.err().unwrap()); + + let test_bldr_url = result.unwrap(); + let bldr_url = test_bldr_url.u.to_string(); + assert_eq!(bldr_url.as_str(), test_bldr_url_val, "{:#?}", bldr_url); + } + } +} diff --git a/components/hab/src/command/launcher.rs b/components/hab/src/command/launcher.rs index 7bbc325edb..67561894b7 100644 --- a/components/hab/src/command/launcher.rs +++ b/components/hab/src/command/launcher.rs @@ -1,5 +1,4 @@ -use crate::{cli::hab::sup::SupRun, - command::sup::{SUP_CMD, +use crate::{command::sup::{SUP_CMD, SUP_CMD_ENVVAR, SUP_PKG_IDENT}, common::ui::UI, @@ -12,14 +11,19 @@ use crate::{cli::hab::sup::SupRun, os::process, package::PackageIdent}, VERSION}; + use std::{ffi::OsString, path::PathBuf, str::FromStr}; +#[cfg(feature = "v2")] +use crate::cli::hab::sup::SupRun; + const LAUNCH_CMD: &str = "hab-launch"; const LAUNCH_CMD_ENVVAR: &str = "HAB_LAUNCH_BINARY"; const LAUNCH_PKG_IDENT: &str = "core/hab-launcher"; +#[cfg(feature = "v2")] pub async fn start(ui: &mut UI, sup_run: SupRun, args: &[OsString]) -> Result<()> { init()?; let channel = sup_run.shared_load.channel; diff --git a/components/hab/src/command/origin/key/export.rs b/components/hab/src/command/origin/key/export.rs index e9d6ceaf87..1b3d624ab1 100644 --- a/components/hab/src/command/origin/key/export.rs +++ b/components/hab/src/command/origin/key/export.rs @@ -1,5 +1,5 @@ -use crate::{cli::KeyType, - error::Result}; +use crate::{error::Result, + key_type::KeyType}; use habitat_core::{crypto::keys::{KeyCache, KeyFile}, origin::Origin}; diff --git a/components/hab/src/command/pkg/download.rs b/components/hab/src/command/pkg/download.rs index 1f4c712f80..7704a8110b 100644 --- a/components/hab/src/command/pkg/download.rs +++ 
b/components/hab/src/command/pkg/download.rs
@@ -31,7 +31,8 @@ use std::{collections::{HashMap,
                           HashSet},
           fs::DirBuilder,
           path::{Path,
-                 PathBuf}};
+                 PathBuf},
+          str::FromStr};
 
 use crate::{api_client::{self,
                          retry_builder_api,
@@ -78,6 +79,43 @@ pub struct PackageSetFile {
     pub targets: HashMap<PackageTarget, Vec<PackageSetValue>>,
 }
 
+// TODO: Remove this clippy allow once `v2` support is removed.
+#[allow(dead_code)]
+impl PackageSetFile {
+    // Get `PackageSet`s from the `toml` data. The following validation is performed:
+    // `format_version` must be 1.
+    pub(crate) fn to_package_sets(&self) -> Result<Vec<PackageSet>> {
+        match self.format_version {
+            Some(version) => {
+                if version != 1 {
+                    Err(Error::PackageSetParseError(format!("format_version \
+                                                             invalid, only \
+                                                             version 1 allowed \
+                                                             ({} provided)",
+                                                            self.format_version
+                                                                .unwrap())))
+                } else {
+                    let mut sets = vec![];
+                    for (target, pkg_sets) in &self.targets {
+                        for pkg_set in pkg_sets {
+                            let mut idents = vec![];
+                            for package in &pkg_set.packages {
+                                let ident = PackageIdent::from_str(package).map_err(Error::from)?;
+                                idents.push(ident);
+                            }
+                            sets.push(PackageSet { target: *target,
+                                                   channel: pkg_set.channel.clone(),
+                                                   idents });
+                        }
+                    }
+                    Ok(sets)
+                }
+            }
+            None => Err(Error::PackageSetParseError("format_version missing!".to_string())),
+        }
+    }
+}
+
 #[derive(Debug, Deserialize)]
 pub struct PackageSetValue {
     pub channel: ChannelIdent,
diff --git a/components/hab/src/command/pkg/export.rs b/components/hab/src/command/pkg/export.rs
index a3c1b51888..02d36451f5 100644
--- a/components/hab/src/command/pkg/export.rs
+++ b/components/hab/src/command/pkg/export.rs
@@ -1,5 +1,3 @@
-pub mod cf;
 pub mod container;
 mod export_common;
-pub mod mesos;
 pub mod tar;
diff --git a/components/hab/src/command/pkg/export/cf.rs b/components/hab/src/command/pkg/export/cf.rs
deleted file mode 100644
index c2625b86d1..0000000000
--- a/components/hab/src/command/pkg/export/cf.rs
+++ /dev/null
@@ -1,17 +0,0 @@
-use crate::{common::ui::UI,
-            error::Result};
-use std::ffi::OsString;
-
-// It would be more consistent naming to use "export cf" instead of "cfize", but for backwards
-// compatibility we keep "cfize"
-const EXPORT_CMD_ENVVAR: &str = "HAB_PKG_CFIZE_BINARY";
-const EXPORT_PKG_IDENT_ENVVAR: &str = "HAB_PKG_CFIZE_PKG_IDENT";
-const EXPORT_CMD: &str = "hab-pkg-cfize";
-
-pub async fn start(ui: &mut UI, args: &[OsString]) -> Result<()> {
-    crate::command::pkg::export::export_common::start(ui,
-                                                      args,
-                                                      EXPORT_CMD_ENVVAR,
-                                                      EXPORT_PKG_IDENT_ENVVAR,
-                                                      EXPORT_CMD).await
-}
diff --git a/components/hab/src/command/pkg/export/mesos.rs b/components/hab/src/command/pkg/export/mesos.rs
deleted file mode 100644
index 5633c59e2f..0000000000
--- a/components/hab/src/command/pkg/export/mesos.rs
+++ /dev/null
@@ -1,17 +0,0 @@
-use crate::{common::ui::UI,
-            error::Result};
-use std::ffi::OsString;
-
-// It would be more consistent naming to use "export mesos" instead of "mesosize", but for backwards
-// compatibility we keep "mesosize"
-const EXPORT_CMD_ENVVAR: &str = "HAB_PKG_MESOSIZE_BINARY";
-const EXPORT_PKG_IDENT_ENVVAR: &str = "HAB_PKG_MESOSIZE_PKG_IDENT";
-const EXPORT_CMD: &str = "hab-pkg-mesosize";
-
-pub async fn start(ui: &mut UI, args: &[OsString]) -> Result<()> {
-    crate::command::pkg::export::export_common::start(ui,
-                                                      args,
-                                                      EXPORT_CMD_ENVVAR,
-                                                      EXPORT_PKG_IDENT_ENVVAR,
-                                                      EXPORT_CMD).await
-}
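For context on the `PackageSetFile::to_package_sets` validation added above: the input is the package-set TOML consumed by `hab pkg download`, with a mandatory `format_version = 1` key and one array-of-tables per package target. A minimal standalone sketch of that shape and check (plain `String`s stand in for `PackageTarget`/`ChannelIdent`/`PackageIdent`; assumes `serde` with the `derive` feature and `toml` as dependencies, and the `#[serde(flatten)]` layout is an assumption about fields outside the hunk shown):

```rust
use std::collections::HashMap;

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct PackageSetFile {
    format_version: Option<u32>,
    // Remaining top-level keys are treated as target names.
    #[serde(flatten)]
    targets: HashMap<String, Vec<PackageSetValue>>,
}

#[derive(Debug, Deserialize)]
struct PackageSetValue {
    channel: String,
    packages: Vec<String>,
}

fn main() {
    let toml_str = r#"
        format_version = 1

        [[x86_64-linux]]
        channel = "stable"
        packages = ["core/redis", "core/nginx"]
    "#;

    let file: PackageSetFile = toml::from_str(toml_str).expect("parse");
    // Mirror the version check performed by `to_package_sets`.
    assert_eq!(file.format_version, Some(1));
    for (target, sets) in &file.targets {
        for set in sets {
            println!("{target}: {} packages from {}", set.packages.len(), set.channel);
        }
    }
}
```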
diff --git a/components/hab/src/command/pkg/list.rs b/components/hab/src/command/pkg/list.rs
index de14bfbf83..863c79db1a 100644
--- a/components/hab/src/command/pkg/list.rs
+++ b/components/hab/src/command/pkg/list.rs
@@ -3,7 +3,10 @@ use crate::{error::Result,
                        FS_ROOT_PATH},
             package::{list,
                       PackageIdent}}};
+
+#[cfg(feature = "v2")]
 use clap::ArgMatches;
+
 use std::str::FromStr;
 
 /// There are three options for what we can list:
@@ -18,6 +21,7 @@ pub enum ListingType {
     Ident(PackageIdent),
 }
 
+#[cfg(feature = "v2")]
 /// Convert a set of command line options into a ListingType
 impl<'a> From<&'a ArgMatches<'a>> for ListingType {
     /// Convert clap options into a listing type.
diff --git a/components/hab/src/command/pkg/uninstall.rs b/components/hab/src/command/pkg/uninstall.rs
index e104e3f328..165dc5d4ad 100644
--- a/components/hab/src/command/pkg/uninstall.rs
+++ b/components/hab/src/command/pkg/uninstall.rs
@@ -3,7 +3,6 @@ mod uninstall_impl;
 use super::{ExecutionStrategy,
             Scope};
 use crate::error::Result;
-use clap::ArgMatches;
 use habitat_common::ui::UI;
 use habitat_core::package::PackageIdent;
 use std::path::Path;
@@ -19,6 +18,20 @@ pub enum UninstallMode {
     KeepLatest(usize),
 }
 
+impl From<Option<usize>> for UninstallMode {
+    fn from(keep_latest: Option<usize>) -> Self {
+        match keep_latest {
+            Some(keep_latest) => Self::KeepLatest(keep_latest),
+            None => Self::Single,
+        }
+    }
+}
+
+// TODO: Remove after feature `v2` is removed
+#[cfg(feature = "v2")]
+use clap::ArgMatches;
+
+#[cfg(feature = "v2")]
 impl<'a> From<&'a ArgMatches<'a>> for UninstallMode {
     fn from(m: &ArgMatches) -> Self {
         m.value_of("KEEP_LATEST")
diff --git a/components/hab/src/error.rs b/components/hab/src/error.rs
index 37d5c792f2..9dda3a46a8 100644
--- a/components/hab/src/error.rs
+++ b/components/hab/src/error.rs
@@ -34,7 +34,10 @@ pub enum Error {
     CannotRemovePackage(hcore::package::PackageIdent, usize),
     CommandNotFoundInPkg((String, String)),
     CliConfig(cli_config::Error),
+
+    #[cfg(feature = "v2")]
     ConfigOpt(configopt::Error),
+
     CryptoCLI(String),
     CtlClient(SrvClientError),
     CtrlcError(ctrlc::Error),
@@ -112,7 +115,11 @@ impl fmt::Display for Error {
                         c, p)
             }
             Error::CliConfig(ref err) => format!("{}", err),
+
+            // TODO: Remove after `v2` is removed
+            #[cfg(feature = "v2")]
             Error::ConfigOpt(ref err) => format!("{}", err),
+
             Error::CryptoCLI(ref e) => e.to_string(),
             Error::CtlClient(ref e) => e.to_string(),
             Error::CtrlcError(ref err) => format!("{}", err),
@@ -234,6 +241,7 @@ impl From<cli_config::Error> for Error {
     fn from(err: cli_config::Error) -> Self { Error::CliConfig(err) }
 }
 
+#[cfg(feature = "v2")]
 impl From<configopt::Error> for Error {
     fn from(err: configopt::Error) -> Self { Error::ConfigOpt(err) }
 }
diff --git a/components/hab/src/key_type.rs b/components/hab/src/key_type.rs
new file mode 100644
index 0000000000..a9f4112b3c
--- /dev/null
+++ b/components/hab/src/key_type.rs
@@ -0,0 +1,35 @@
+use std::str::FromStr;
+
+use serde::{Deserialize,
+            Serialize};
+
+////////////////////////////////////////////////////////////////////////
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq, Deserialize, Serialize)]
+pub enum KeyType {
+    Public,
+    Secret,
+}
+
+impl FromStr for KeyType {
+    type Err = crate::error::Error;
+
+    fn from_str(value: &str) -> std::result::Result<Self, Self::Err> {
+        match value {
+            "public" => Ok(Self::Public),
+            "secret" => Ok(Self::Secret),
+            _ => Err(Self::Err::KeyTypeParseError(value.to_string())),
+        }
+    }
+}
+
+impl std::fmt::Display for KeyType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            KeyType::Public => write!(f, "public"),
+            KeyType::Secret => write!(f, "secret"),
+        }
+    }
+}
+
+////////////////////////////////////////////////////////////////////////
diff --git a/components/hab/src/lib.rs b/components/hab/src/lib.rs index
b1ffb8b6ec..f9e05b62a3 100644 --- a/components/hab/src/lib.rs +++ b/components/hab/src/lib.rs @@ -6,7 +6,15 @@ use habitat_core as hcore; use habitat_sup_client as sup_client; use habitat_sup_protocol as protocol; +#[cfg(feature = "v2")] pub mod cli; + +#[cfg(feature = "v4")] +mod cli_v4; + +#[cfg(feature = "v4")] +pub use cli_v4::cli_driver; + pub mod command; pub mod error; mod exec; @@ -17,5 +25,13 @@ pub const PRODUCT: &str = "hab"; pub const VERSION: &str = include_str!(concat!(env!("OUT_DIR"), "/VERSION")); pub const ORIGIN_ENVVAR: &str = "HAB_ORIGIN"; pub const BLDR_URL_ENVVAR: &str = "HAB_BLDR_URL"; +pub const AFTER_HELP: &str = + "\nALIASES:\n apply Alias for: 'config apply'\n install Alias for: 'pkg \ + install'\n run Alias for: 'sup run'\n setup Alias for: 'cli setup'\n \ + start Alias for: 'svc start'\n stop Alias for: 'svc stop'\n term \ + Alias for: 'sup term'\n"; pub use crate::hcore::AUTH_TOKEN_ENVVAR; + +// TODO:agadgil: When Clap v2 support is gone, this should become `pub(crate)` +pub mod key_type; diff --git a/components/hab/src/main.rs b/components/hab/src/main.rs old mode 100755 new mode 100644 index cd0ae718b9..0b5b7b1649 --- a/components/hab/src/main.rs +++ b/components/hab/src/main.rs @@ -1,2184 +1,14 @@ -#![recursion_limit = "128"] +#[cfg(feature = "v2")] +mod main_v2; -use clap::{value_t, - ArgMatches, - ErrorKind as ClapErrorKind, - Shell}; -use configopt::{ConfigOpt, - Error as ConfigOptError}; -use futures::stream::StreamExt; -#[cfg(any(all(target_os = "linux", - any(target_arch = "x86_64", target_arch = "aarch64")), - all(target_os = "windows", target_arch = "x86_64"),))] -use hab::cli::hab::pkg::ExportCommand as PkgExportCommand; -use hab::{cli::{self, - gateway_util, - hab::{license::License, - origin::{Rbac, - RbacSet, - RbacShow}, - pkg::PkgExec, - svc::{self, - BulkLoad as SvcBulkLoad, - Load as SvcLoad, - Svc}, - util::{bldr_auth_token_from_args_env_or_load, - bldr_url_from_args_env_load_or_default}, - Hab, - Origin, - Pkg}, - parse_optional_arg, - KeyType}, - command::{self, - pkg::{download::{PackageSet, - PackageSetFile}, - list::ListingType, - uninstall::UninstallHookMode}}, - error::{Error, - Result}, - license, - scaffolding, - AUTH_TOKEN_ENVVAR, - BLDR_URL_ENVVAR, - ORIGIN_ENVVAR, - PRODUCT, - VERSION}; -use habitat_api_client::BuildOnUpload; -use habitat_common::{self as common, - cli::key_cache_from_matches, - cli_config::CliConfig, - command::package::install::{InstallHookMode, - InstallMode, - InstallSource, - LocalPackageUsage}, - types::ResolvedListenCtlAddr, - ui::{self, - Status, - UIWriter, - UI}, - FeatureFlag}; -use habitat_core::{crypto::{init, - keys::{Key, - KeyCache}}, - env::{self as henv, - Config as _}, - fs::{cache_artifact_path, - FS_ROOT_PATH}, - os::process::ShutdownTimeout, - package::{target, - PackageIdent, - PackageTarget}, - service::ServiceGroup, - url::default_bldr_url, - ChannelIdent}; -use habitat_sup_client::{SrvClient, - SrvClientError}; -use habitat_sup_protocol::{self as sup_proto, - codec::*, - net::ErrCode, - types::*}; -use lazy_static::lazy_static; -use log::{debug, - warn}; -use std::{collections::HashMap, - convert::TryFrom, - env, - ffi::OsString, - fs::File, - io::{self, - prelude::*, - Read}, - path::{Path, - PathBuf}, - process, - result, - str::FromStr, - string::ToString, - thread}; -use tabwriter::TabWriter; - -#[cfg(not(target_os = "macos"))] -use hab::cli::hab::sup::{HabSup, - Secret, - Sup}; -#[cfg(not(target_os = "macos"))] -use habitat_core::tls::ctl_gateway as ctl_gateway_tls; 
-#[cfg(not(target_os = "macos"))] -use webpki::types::DnsName; - -/// Makes the --org CLI param optional when this env var is set -const HABITAT_ORG_ENVVAR: &str = "HAB_ORG"; -/// Makes the --user CLI param optional when this env var is set -const HABITAT_USER_ENVVAR: &str = "HAB_USER"; - -lazy_static! { - static ref STATUS_HEADER: Vec<&'static str> = { - vec!["package", - "type", - "desired", - "state", - "elapsed (s)", - "pid", - "group",] - }; -} +mod main_v4; #[tokio::main] async fn main() { - env_logger::init(); - let mut ui = UI::default_with_env(); - let flags = FeatureFlag::from_env(&mut ui); - if let Err(e) = start(&mut ui, flags).await { - let exit_code = e.exit_code(); - ui.fatal(e).unwrap(); - std::process::exit(exit_code) - } -} - -#[allow(clippy::cognitive_complexity)] -async fn start(ui: &mut UI, feature_flags: FeatureFlag) -> Result<()> { - // We parse arguments with configopt in a separate thread to eliminate - // possible stack overflow crashes at runtime. OSX or a debug Windows build, - // for instance, will crash with our large tree. This is a known issue: - // https://github.com/kbknapp/clap-rs/issues/86 - let child = thread::Builder::new().stack_size(8 * 1024 * 1024) - .spawn(Hab::try_from_args_with_configopt) - .unwrap(); - let hab = child.join().unwrap(); - - if let Ok(Hab::License(License::Accept)) = hab { - license::accept_license(ui)?; - return Ok(()); - } - - // Allow checking version information and displaying command help without accepting the license. - // TODO (DM): To prevent errors in discrepancy between the structopt and cli versions only do - // this when the license has not yet been accepted. When we switch fully to structopt this can - // be completely removed and we should just call `Hab::from_args_with_configopt` which will - // automatically result in this functionality. - if !license::check_for_license_acceptance().unwrap_or_default() - .accepted() - { - if let Err(ConfigOptError::Clap(e)) = &hab { - if e.kind == ClapErrorKind::VersionDisplayed || e.kind == ClapErrorKind::HelpDisplayed { - e.exit() - } - } - } - - // We must manually detect a supervisor version check and call the `hab-sup` binary to get the - // true Supervisor version. - // TODO (DM): This is an ugly consequence of having `hab sup` subcommands handled by both the - // `hab` binary and the `hab-sup` binary. Potential fixes: - // 1. Handle all `hab sup` subcommands with the `hab-sup` binary - // 2. Have a dedicated subcommand for commands handled by the `hab-sup` binary - let mut args = env::args(); - if matches!((args.next().unwrap_or_default().as_str(), - args.next().unwrap_or_default().as_str(), - args.next().unwrap_or_default().as_str()), - (_, "sup", "--version") | (_, "sup", "-V")) - { - return command::sup::start(ui, &args_after_first(2)).await; - } - - license::check_for_license_acceptance_and_prompt(ui)?; - - // Parse and handle commands which have been migrated to use `structopt` here. Once everything - // is migrated to use `structopt` the parsing logic below this using clap directly will be gone. - match hab { - Ok(hab) => { - match hab { - Hab::Origin(Origin::Rbac(action)) => { - match action { - Rbac::Set(rbac_set) => { - return sub_origin_member_role_set(ui, rbac_set).await; - } - Rbac::Show(rbac_show) => { - return sub_origin_member_role_show(ui, rbac_show).await; - } - } - } - #[cfg(not(target_os = "macos"))] - Hab::Run(sup_run) => { - ui.warn("'hab run' as an alias for 'hab sup run' is deprecated. 
Please \ - update your automation and processes accordingly.")?; - return command::launcher::start(ui, sup_run, &args_after_first(1)).await; - } - #[cfg(any(target_os = "macos", - any(all(target_os = "linux", - any(target_arch = "x86_64", target_arch = "aarch64")), - all(target_os = "windows", target_arch = "x86_64"),)))] - Hab::Studio(studio) => { - return command::studio::enter::start(ui, studio.args()).await; - } - #[cfg(not(target_os = "macos"))] - Hab::Sup(sup) => { - match sup { - HabSup::Sup(sup) => { - // These commands are handled by the `hab-sup` or `hab-launch` binaries. - // We need to pass the subcommand that was issued to the underlying - // binary. It is a bit hacky, but to do that we strip off the `hab sup` - // command prefix and pass the rest of the args to underlying binary. - let args = args_after_first(2); - match sup { - #[cfg(any( - all(target_os = "linux", any(target_arch = "x86_64", target_arch = "aarch64")), - all(target_os = "windows", target_arch = "x86_64"), - ))] - Sup::Bash | Sup::Sh => { - return command::sup::start(ui, &args).await; - } - Sup::Term => { - return command::sup::start(ui, &args).await; - } - Sup::Run(sup_run) => { - return command::launcher::start(ui, sup_run, &args).await; - } - } - } - HabSup::Depart { member_id, - remote_sup, } => { - return sub_sup_depart(member_id, remote_sup.inner()).await; - } - HabSup::Secret(secret) => { - match secret { - Secret::Generate => return sub_sup_secret_generate(), - Secret::GenerateTls { subject_alternative_name, - path, } => { - return sub_sup_secret_generate_key(&subject_alternative_name.dns_name()?, - path) - } - } - } - HabSup::Status { pkg_ident, - remote_sup, } => { - ui.warn("'hab sup status' as an alias for 'hab svc status' is \ - deprecated. Please update your automation and processes \ - accordingly.")?; - return sub_svc_status(pkg_ident, remote_sup.inner()).await; - } - HabSup::Restart { remote_sup } => { - return sub_sup_restart(remote_sup.inner()).await; - } - } - } - Hab::Svc(svc) => { - match svc { - Svc::BulkLoad(svc_bulk_load) => { - if feature_flags.contains(FeatureFlag::SERVICE_CONFIG_FILES) { - return sub_svc_bulk_load(svc_bulk_load).await; - } else { - return Err(Error::ArgumentError(String::from("`hab svc bulkload` is only available when `HAB_FEAT_SERVICE_CONFIG_FILES` is set"))); - } - } - Svc::Load(svc_load) => { - return sub_svc_load(svc_load).await; - } - Svc::Update(svc_update) => return sub_svc_update(svc_update).await, - Svc::Status(svc_status) => { - return sub_svc_status(svc_status.pkg_ident, - svc_status.remote_sup.inner()).await; - } - _ => { - // All other commands will be caught by the CLI parsing logic below. - } - } - } - #[cfg(not(target_os = "macos"))] - Hab::Term => { - ui.warn("'hab term' as an alias for 'hab sup term' is deprecated. 
Please \ - update your automation and processes accordingly.")?; - return command::sup::start(ui, &args_after_first(1)).await; - } - Hab::Pkg(pkg) => { - #[allow(clippy::collapsible_match)] - match pkg { - // package export is not available on platforms that have no package support - #[cfg(any(all(target_os = "linux", - any(target_arch = "x86_64", target_arch = "aarch64")), - all(target_os = "windows", target_arch = "x86_64"),))] - Pkg::Export(export) => { - match export { - #[cfg(target_os = "linux")] - PkgExportCommand::Cf(args) => { - return command::pkg::export::cf::start(ui, &args.args).await; - } - #[cfg(any(target_os = "linux", target_os = "windows"))] - PkgExportCommand::Container(args) => { - return command::pkg::export::container::start(ui, &args.args).await; - } - #[cfg(any(target_os = "linux", target_os = "windows"))] - PkgExportCommand::Docker(args) => { - ui.warn("'hab pkg export docker' is now a deprecated alias \ - for 'hab pkg export container'. Please update your \ - automation and processes accordingly.")?; - return command::pkg::export::container::start(ui, &args.args).await; - } - #[cfg(target_os = "linux")] - PkgExportCommand::Mesos(args) => { - return command::pkg::export::mesos::start(ui, &args.args).await; - } - #[cfg(any(target_os = "linux", target_os = "windows"))] - PkgExportCommand::Tar(args) => { - return command::pkg::export::tar::start(ui, &args.args).await; - } - } - } - Pkg::Exec(PkgExec { pkg_ident, - cmd, - args, }) => { - return command::pkg::exec::start(&pkg_ident.pkg_ident(), - cmd, - &args.args); - } - _ => { - // All other commands will be caught by the CLI parsing logic below. - } - } - } - _ => { - // All other commands will be caught by the CLI parsing logic below. - } - } - } - Err(e @ ConfigOptError::ConfigGenerated(_) - | e @ ConfigOptError::ConfigFile(..) - | e @ ConfigOptError::Toml(..)) => e.exit(), - Err(_) => { - // Completely ignore all other errors. They will be caught by the CLI parsing logic - // below. - } - }; - - // Similar to the configopt parsing above We build the command tree in a - // separate thread to eliminate possible stack overflow crashes at runtime. - // See known issue:https://github.com/kbknapp/clap-rs/issues/86 - let cli_child = thread::Builder::new().stack_size(8 * 1024 * 1024) - .spawn(move || { - cli::get(feature_flags).get_matches_safe() - .unwrap_or_else(|e| { - e.exit(); - }) - }) - .unwrap(); - let app_matches = cli_child.join().unwrap(); - - match app_matches.subcommand() { - ("apply", Some(m)) => { - ui.warn("'hab apply' as an alias for 'hab config apply' is deprecated. Please \ - update your automation and processes accordingly.")?; - sub_svc_set(m).await? - } - ("cli", Some(matches)) => { - match matches.subcommand() { - ("setup", Some(m)) => sub_cli_setup(ui, m)?, - ("completers", Some(m)) => sub_cli_completers(m, feature_flags), - _ => unreachable!(), - } - } - ("config", Some(m)) => { - match m.subcommand() { - ("apply", Some(m)) => sub_svc_set(m).await?, - ("show", Some(m)) => sub_svc_config(m).await?, - _ => unreachable!(), - } - } - ("file", Some(m)) => { - match m.subcommand() { - ("upload", Some(m)) => sub_file_put(m).await?, - _ => unreachable!(), - } - } - ("install", Some(m)) => { - ui.warn("'hab install' as an alias for 'hab pkg install' is deprecated. Please \ - update your automation and processes accordingly.")?; - sub_pkg_install(ui, m, feature_flags).await? 
- } - ("origin", Some(matches)) => { - match matches.subcommand() { - ("invitations", Some(m)) => { - match m.subcommand() { - ("accept", Some(sc)) => sub_accept_origin_invitation(ui, sc).await?, - ("ignore", Some(sc)) => sub_ignore_origin_invitation(ui, sc).await?, - ("list", Some(sc)) => sub_list_user_invitations(ui, sc).await?, - ("pending", Some(sc)) => sub_list_pending_origin_invitations(ui, sc).await?, - ("send", Some(sc)) => sub_send_origin_invitation(ui, sc).await?, - ("rescind", Some(sc)) => sub_rescind_origin_invitation(ui, sc).await?, - _ => unreachable!(), - } - } - ("key", Some(m)) => { - match m.subcommand() { - ("download", Some(sc)) => sub_origin_key_download(ui, sc).await?, - ("export", Some(sc)) => sub_origin_key_export(sc)?, - ("generate", Some(sc)) => sub_origin_key_generate(ui, sc)?, - ("import", Some(sc)) => sub_origin_key_import(ui, sc)?, - ("upload", Some(sc)) => sub_origin_key_upload(ui, sc).await?, - _ => unreachable!(), - } - } - ("secret", Some(m)) => { - match m.subcommand() { - ("upload", Some(sc)) => sub_origin_secret_upload(ui, sc).await?, - ("delete", Some(sc)) => sub_origin_secret_delete(ui, sc).await?, - ("list", Some(sc)) => sub_origin_secret_list(ui, sc).await?, - _ => unreachable!(), - } - } - ("create", Some(m)) => sub_origin_create(ui, m).await?, - ("delete", Some(m)) => sub_origin_delete(ui, m).await?, - ("transfer", Some(m)) => sub_origin_transfer_ownership(ui, m).await?, - ("depart", Some(m)) => sub_origin_depart(ui, m).await?, - ("info", Some(m)) => sub_origin_info(ui, m).await?, - _ => unreachable!(), - } - } - ("bldr", Some(matches)) => { - match matches.subcommand() { - ("job", Some(m)) => { - match m.subcommand() { - ("start", Some(m)) => sub_bldr_job_start(ui, m).await?, - ("cancel", Some(m)) => sub_bldr_job_cancel(ui, m).await?, - ("promote", Some(m)) => sub_bldr_job_promote_or_demote(ui, m, true).await?, - ("demote", Some(m)) => sub_bldr_job_promote_or_demote(ui, m, false).await?, - ("status", Some(m)) => sub_bldr_job_status(ui, m).await?, - _ => unreachable!(), - } - } - ("channel", Some(m)) => { - match m.subcommand() { - ("create", Some(m)) => sub_bldr_channel_create(ui, m).await?, - ("destroy", Some(m)) => sub_bldr_channel_destroy(ui, m).await?, - ("list", Some(m)) => sub_bldr_channel_list(ui, m).await?, - ("promote", Some(m)) => sub_bldr_channel_promote(ui, m).await?, - ("demote", Some(m)) => sub_bldr_channel_demote(ui, m).await?, - _ => unreachable!(), - } - } - _ => unreachable!(), - } - } - ("pkg", Some(matches)) => { - match matches.subcommand() { - ("binds", Some(m)) => sub_pkg_binds(m)?, - ("binlink", Some(m)) => sub_pkg_binlink(ui, m)?, - ("build", Some(m)) => sub_pkg_build(ui, m, feature_flags).await?, - ("channels", Some(m)) => sub_pkg_channels(ui, m).await?, - ("config", Some(m)) => sub_pkg_config(m)?, - ("dependencies", Some(m)) => sub_pkg_dependencies(m)?, - ("download", Some(m)) => sub_pkg_download(ui, m, feature_flags).await?, - ("env", Some(m)) => sub_pkg_env(m)?, - ("hash", Some(m)) => sub_pkg_hash(m)?, - ("install", Some(m)) => sub_pkg_install(ui, m, feature_flags).await?, - ("list", Some(m)) => sub_pkg_list(m)?, - ("path", Some(m)) => sub_pkg_path(m)?, - ("provides", Some(m)) => sub_pkg_provides(m)?, - ("search", Some(m)) => sub_pkg_search(m).await?, - ("sign", Some(m)) => sub_pkg_sign(ui, m)?, - ("uninstall", Some(m)) => sub_pkg_uninstall(ui, m).await?, - ("upload", Some(m)) => sub_pkg_upload(ui, m).await?, - ("bulkupload", Some(m)) => sub_pkg_bulkupload(ui, m).await?, - ("delete", Some(m)) => sub_pkg_delete(ui, 
m).await?, - ("verify", Some(m)) => sub_pkg_verify(ui, m)?, - ("header", Some(m)) => sub_pkg_header(ui, m)?, - ("info", Some(m)) => sub_pkg_info(ui, m)?, - ("promote", Some(m)) => sub_pkg_promote(ui, m).await?, - ("demote", Some(m)) => sub_pkg_demote(ui, m).await?, - _ => unreachable!(), - } - } - ("plan", Some(matches)) => { - match matches.subcommand() { - ("init", Some(m)) => sub_plan_init(ui, m)?, - ("render", Some(m)) => sub_plan_render(ui, m)?, - _ => unreachable!(), - } - } - ("ring", Some(matches)) => { - match matches.subcommand() { - ("key", Some(m)) => { - match m.subcommand() { - ("export", Some(sc)) => sub_ring_key_export(sc)?, - ("import", Some(sc)) => sub_ring_key_import(ui, sc)?, - ("generate", Some(sc)) => sub_ring_key_generate(ui, sc)?, - _ => unreachable!(), - } - } - _ => unreachable!(), - } - } - ("svc", Some(matches)) => { - match matches.subcommand() { - ("key", Some(m)) => { - match m.subcommand() { - ("generate", Some(sc)) => sub_service_key_generate(ui, sc)?, - _ => unreachable!(), - } - } - ("unload", Some(m)) => sub_svc_unload(m).await?, - ("start", Some(m)) => sub_svc_start(m).await?, - ("stop", Some(m)) => sub_svc_stop(m).await?, - _ => unreachable!(), - } - } - ("supportbundle", _) => sub_supportbundle(ui)?, - ("setup", Some(m)) => { - ui.warn("'hab setup' as an alias for 'hab cli setup' is deprecated. Please update \ - your automation and processes accordingly.")?; - sub_cli_setup(ui, m)? - } - ("start", Some(m)) => { - ui.warn("'hab start' as an alias for 'hab svc start' is deprecated. Please update \ - your automation and processes accordingly.")?; - sub_svc_start(m).await? - } - ("stop", Some(m)) => { - ui.warn("'hab stop' as an alias for 'hab svc stop' is deprecated. Please update \ - your automation and processes accordingly.")?; - sub_svc_stop(m).await? - } - ("user", Some(matches)) => { - match matches.subcommand() { - ("key", Some(m)) => { - match m.subcommand() { - ("generate", Some(sc)) => sub_user_key_generate(ui, sc)?, - _ => unreachable!(), - } - } - _ => unreachable!(), - } - } - _ => unreachable!(), - }; - Ok(()) -} - -fn sub_cli_setup(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let key_cache = key_cache_from_matches(m)?; - init()?; - - command::cli::setup::start(ui, &key_cache) -} - -fn sub_cli_completers(m: &ArgMatches<'_>, feature_flags: FeatureFlag) { - let shell = m.value_of("SHELL") - .expect("Missing Shell; A shell is required"); - - // TODO (CM): Interesting... 
the completions generated can depend - // on what feature flags happen to be enabled at the time you - // generated the completions - cli::get(feature_flags).gen_completions_to("hab", - shell.parse::().unwrap(), - &mut io::stdout()); -} - -async fn sub_origin_key_download(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN").parse()?; - let revision = m.value_of("REVISION"); - let with_secret = m.is_present("WITH_SECRET"); - let with_encryption = m.is_present("WITH_ENCRYPTION"); - let token = maybe_auth_token(m); - let url = bldr_url_from_matches(m)?; - let key_cache = key_cache_from_matches(m)?; - - command::origin::key::download::start(ui, - &url, - &origin, - revision, - with_secret, - with_encryption, - token.as_deref(), - &key_cache).await -} - -fn sub_origin_key_export(m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN").parse()?; - let key_type = KeyType::from_str(m.value_of("KEY_TYPE").unwrap_or("public"))?; - let key_cache = key_cache_from_matches(m)?; - init()?; - - command::origin::key::export::start(&origin, key_type, &key_cache) -} - -fn sub_origin_key_generate(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = origin_param_or_env(m)?; - let key_cache = key_cache_from_matches(m)?; - init()?; - - command::origin::key::generate::start(ui, &origin, &key_cache) -} - -fn sub_origin_key_import(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let mut content = String::new(); - let key_cache = key_cache_from_matches(m)?; - init()?; - io::stdin().read_to_string(&mut content)?; - - // Trim the content to lose line feeds added by Powershell pipeline - command::origin::key::import::start(ui, content.trim(), &key_cache) -} - -async fn sub_origin_key_upload(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - let key_cache = key_cache_from_matches(m)?; - - init()?; - - match m.value_of("ORIGIN") { - Some(origin) => { - let origin = origin.parse()?; - // you can either specify files, or infer the latest key names - let with_secret = m.is_present("WITH_SECRET"); - command::origin::key::upload_latest::start(ui, - &url, - &token, - &origin, - with_secret, - &key_cache).await - } - None => { - let keyfile = Path::new(required_value_of(m, "PUBLIC_FILE")); - let secret_keyfile = m.value_of("SECRET_FILE").map(Path::new); - command::origin::key::upload::start(ui, &url, &token, keyfile, secret_keyfile).await - } - } -} - -async fn sub_origin_secret_upload(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - let origin = origin_param_or_env(m)?; - let key = required_value_of(m, "KEY_NAME"); - let secret = required_value_of(m, "SECRET"); - let key_cache = key_cache_from_matches(m)?; - command::origin::secret::upload::start(ui, &url, &token, &origin, key, secret, &key_cache).await -} - -async fn sub_origin_secret_delete(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - let origin = origin_param_or_env(m)?; - let key = required_value_of(m, "KEY_NAME"); - command::origin::secret::delete::start(ui, &url, &token, &origin, key).await -} - -async fn sub_origin_secret_list(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - let origin = origin_param_or_env(m)?; - command::origin::secret::list::start(ui, &url, 
&token, &origin).await -} - -async fn sub_origin_create(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::create::start(ui, &url, &token, origin).await -} - -async fn sub_origin_info(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - let to_json = m.is_present("TO_JSON"); - command::origin::info::start(ui, &url, &token, origin, to_json).await -} - -async fn sub_origin_delete(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::delete::start(ui, &url, &token, origin).await -} - -async fn sub_origin_transfer_ownership(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let account = required_value_of(m, "NEW_OWNER_ACCOUNT"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::transfer::start(ui, &url, &token, origin, account).await -} - -async fn sub_origin_depart(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::depart::start(ui, &url, &token, origin).await -} - -async fn sub_accept_origin_invitation(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let invitation_id = - required_value_of(m, "INVITATION_ID").parse() - .expect("INVITATION_ID should be valid at this point"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::invitations::accept::start(ui, &url, origin, &token, invitation_id).await -} - -async fn sub_ignore_origin_invitation(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let invitation_id = - required_value_of(m, "INVITATION_ID").parse() - .expect("INVITATION_ID should be valid at this point"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::invitations::ignore::start(ui, &url, origin, &token, invitation_id).await -} - -async fn sub_list_user_invitations(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::invitations::list_user::start(ui, &url, &token).await -} - -async fn sub_list_pending_origin_invitations(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::invitations::list_pending_origin::start(ui, &url, origin, &token).await -} - -async fn sub_rescind_origin_invitation(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let invitation_id = - required_value_of(m, "INVITATION_ID").parse() - .expect("INVITATION_ID should be valid at this point"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::invitations::rescind::start(ui, &url, origin, &token, invitation_id).await -} - -async fn sub_send_origin_invitation(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, 
"ORIGIN"); - let invitee_account = required_value_of(m, "INVITEE_ACCOUNT"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::invitations::send::start(ui, &url, origin, &token, invitee_account).await -} - -async fn sub_origin_member_role_show(ui: &mut UI, r: RbacShow) -> Result<()> { - let bldr_url = bldr_url_from_args_env_load_or_default(r.bldr_url.value)?; - let auth_token = bldr_auth_token_from_args_env_or_load(r.auth_token.value)?; - command::origin::rbac::show_role::start(ui, - bldr_url, - r.origin.inner, - &auth_token, - &r.member_account, - r.to_json).await -} - -async fn sub_origin_member_role_set(ui: &mut UI, r: RbacSet) -> Result<()> { - let bldr_url = bldr_url_from_args_env_load_or_default(r.bldr_url.value)?; - let auth_token = bldr_auth_token_from_args_env_or_load(r.auth_token.value)?; - command::origin::rbac::set_role::start(ui, - bldr_url, - r.origin.inner, - &auth_token, - &r.member_account, - r.role, - r.no_prompt).await -} - -fn sub_pkg_binlink(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let ident = required_pkg_ident_from_input(m)?; - let dest_dir = Path::new(required_value_of(m, "DEST_DIR")); - let force = m.is_present("FORCE"); - match m.value_of("BINARY") { - Some(binary) => { - command::pkg::binlink::start(ui, &ident, binary, dest_dir, &FS_ROOT_PATH, force) - } - None => { - command::pkg::binlink::binlink_all_in_pkg(ui, &ident, dest_dir, &FS_ROOT_PATH, force) - } - } -} - -/// Generate a (possibly empty) list of `Origin`s from the value of -/// the `HAB_ORIGIN_KEYS` environment variable / `--keys` argument. -fn hab_key_origins(m: &ArgMatches<'_>) -> Result> { - m.values_of("HAB_ORIGIN_KEYS") - .unwrap_or_default() - .map(|n| n.parse().map_err(Into::into)) - .collect() -} - -#[allow(unused_variables)] -async fn sub_pkg_build(ui: &mut UI, m: &ArgMatches<'_>, feature_flags: FeatureFlag) -> Result<()> { - let plan_context = required_value_of(m, "PLAN_CONTEXT"); - let root = m.value_of("HAB_STUDIO_ROOT"); - let src = m.value_of("SRC_PATH"); - let refresh_channel = m.value_of("REFRESH_CHANNEL"); - - let origins = hab_key_origins(m)?; - if !origins.is_empty() { - init()?; - let key_cache = key_cache_from_matches(m)?; - for origin in origins.iter() { - // Validate that a secret signing key is present on disk - // for each origin. 
- key_cache.latest_secret_origin_signing_key(origin)?; - } - } - - #[cfg(target_family = "unix")] - let native_package = if m.is_present("NATIVE_PACKAGE") { - if !feature_flags.contains(FeatureFlag::NATIVE_PACKAGE_SUPPORT) { - return Err(Error::ArgumentError(String::from("`--native-package` is \ - only available when \ - `HAB_FEAT_NATIVE_PACKAGE_SUPPORT` \ - is set"))); - } - true - } else { - false - }; - #[cfg(target_family = "windows")] - let native_package = false; - - let docker = m.is_present("DOCKER"); - let reuse = m.is_present("REUSE"); - - command::pkg::build::start(ui, - plan_context, - root, - src, - &origins, - native_package, - reuse, - docker, - refresh_channel).await -} - -fn sub_pkg_config(m: &ArgMatches<'_>) -> Result<()> { - let ident = required_pkg_ident_from_input(m)?; - common::command::package::config::start(&ident, &*FS_ROOT_PATH)?; - Ok(()) -} - -fn sub_pkg_binds(m: &ArgMatches<'_>) -> Result<()> { - let ident = required_pkg_ident_from_input(m)?; - common::command::package::binds::start(&ident, &*FS_ROOT_PATH)?; - Ok(()) -} - -fn sub_pkg_dependencies(m: &ArgMatches<'_>) -> Result<()> { - let ident = required_pkg_ident_from_input(m)?; - let scope = if m.is_present("TRANSITIVE") { - command::pkg::Scope::PackageAndDependencies - } else { - command::pkg::Scope::Package - }; - - let direction = if m.is_present("REVERSE") { - command::pkg::DependencyRelation::Supports - } else { - command::pkg::DependencyRelation::Requires - }; - command::pkg::dependencies::start(&ident, scope, direction, &FS_ROOT_PATH) -} - -async fn sub_pkg_download(ui: &mut UI, - m: &ArgMatches<'_>, - _feature_flags: FeatureFlag) - -> Result<()> { - let token = maybe_auth_token(m); - let url = bldr_url_from_matches(m)?; - let download_dir = download_dir_from_matches(m); - - // Construct flat file based inputs - let channel = channel_from_matches_or_default(m); - let target = target_from_matches(m)?; - - let install_sources = idents_from_matches(m)?; - - let mut package_sets = vec![PackageSet { target, - channel: channel.clone(), - idents: install_sources }]; - - let mut install_sources_from_file = idents_from_file_matches(ui, m, &channel, target)?; - package_sets.append(&mut install_sources_from_file); - package_sets.retain(|set| !set.idents.is_empty()); - - let verify = verify_from_matches(m); - let ignore_missing_seeds = ignore_missing_seeds_from_matches(m); - - init()?; - - command::pkg::download::start(ui, - &url, - PRODUCT, - VERSION, - &package_sets, - download_dir.as_ref(), - token.as_deref(), - verify, - ignore_missing_seeds).await?; - Ok(()) -} - -fn sub_pkg_env(m: &ArgMatches<'_>) -> Result<()> { - let ident = required_pkg_ident_from_input(m)?; - command::pkg::env::start(&ident, &FS_ROOT_PATH) -} - -fn sub_pkg_hash(m: &ArgMatches<'_>) -> Result<()> { - init()?; - match m.value_of("SOURCE") { - Some(source) => { - // hash single file - command::pkg::hash::start(source) - } - None => { - // read files from stdin - let stdin = io::stdin(); - for line in stdin.lock().lines() { - let file = line?; - command::pkg::hash::start(file.trim_end())?; - } - Ok(()) - } - } -} - -async fn sub_pkg_uninstall(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let ident = required_pkg_ident_from_input(m)?; - let execute_strategy = if m.is_present("DRYRUN") { - command::pkg::ExecutionStrategy::DryRun - } else { - command::pkg::ExecutionStrategy::Run - }; - let mode = command::pkg::uninstall::UninstallMode::from(m); - let scope = if m.is_present("NO_DEPS") { - command::pkg::Scope::Package - } else { - 
command::pkg::Scope::PackageAndDependencies - }; - let excludes = excludes_from_matches(m); - let uninstall_hook_mode = if m.is_present("IGNORE_UNINSTALL_HOOK") { - UninstallHookMode::Ignore - } else { - UninstallHookMode::default() - }; - - command::pkg::uninstall::start(ui, - &ident, - &FS_ROOT_PATH, - execute_strategy, - mode, - scope, - &excludes, - uninstall_hook_mode).await -} - -async fn sub_bldr_channel_create(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let origin = origin_param_or_env(m)?; - let channel = required_channel_from_matches(m); - let token = auth_token_param_or_env(m)?; - command::bldr::channel::create::start(ui, &url, &token, &origin, &channel).await -} - -async fn sub_bldr_channel_destroy(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let origin = origin_param_or_env(m)?; - let channel = required_channel_from_matches(m); - let token = auth_token_param_or_env(m)?; - command::bldr::channel::destroy::start(ui, &url, &token, &origin, &channel).await -} - -async fn sub_bldr_channel_list(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let origin = origin_param_or_env(m)?; - let include_sandbox_channels = m.is_present("SANDBOX"); - command::bldr::channel::list::start(ui, &url, &origin, include_sandbox_channels).await -} - -async fn sub_bldr_channel_promote(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let origin = origin_param_or_env(m)?; - let token = auth_token_param_or_env(m)?; - let source_channel = required_source_channel_from_matches(m); - let target_channel = required_target_channel_from_matches(m); - command::bldr::channel::promote::start(ui, - &url, - &token, - &origin, - &source_channel, - &target_channel).await -} - -async fn sub_bldr_channel_demote(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let origin = origin_param_or_env(m)?; - let token = auth_token_param_or_env(m)?; - let source_channel = required_source_channel_from_matches(m); - let target_channel = required_target_channel_from_matches(m); - command::bldr::channel::demote::start(ui, - &url, - &token, - &origin, - &source_channel, - &target_channel).await -} - -#[allow(unused)] -async fn sub_bldr_job_start(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - Err(Error::BuilderBuildFunctionsRemoved) -} - -#[allow(unused)] -async fn sub_bldr_job_cancel(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - Err(Error::BuilderBuildFunctionsRemoved) -} - -#[allow(unused)] -async fn sub_bldr_job_promote_or_demote(ui: &mut UI, - m: &ArgMatches<'_>, - promote: bool) - -> Result<()> { - Err(Error::BuilderBuildFunctionsRemoved) -} - -#[allow(unused)] -async fn sub_bldr_job_status(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - Err(Error::BuilderBuildFunctionsRemoved) -} - -fn sub_plan_init(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let name = m.value_of("PKG_NAME").map(String::from); - let origin = origin_param_or_env(m)?; - let minimal = m.is_present("MIN"); - let scaffolding_ident = if cfg!(windows) { - match m.value_of("SCAFFOLDING") { - Some(scaffold) => Some(PackageIdent::from_str(scaffold)?), - None => None, - } - } else { - scaffolding::scaffold_check(ui, m.value_of("SCAFFOLDING"))? 
- }; - - command::plan::init::start(ui, &origin, minimal, scaffolding_ident, name) -} - -fn sub_plan_render(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let template_path = required_value_of(m, "TEMPLATE_PATH"); - let template_path = Path::new(template_path); - - let default_toml_path = required_value_of(m, "DEFAULT_TOML"); - let default_toml_path = Path::new(default_toml_path); - - let user_toml_path = m.value_of("USER_TOML").map(Path::new); - - let mock_data_path = m.value_of("MOCK_DATA").map(Path::new); - - let print = m.is_present("PRINT"); - let render = !m.is_present("NO_RENDER"); - let quiet = m.is_present("QUIET"); - - let render_dir = required_value_of(m, "RENDER_DIR"); - let render_dir = Path::new(render_dir); - - command::plan::render::start(ui, - template_path, - default_toml_path, - user_toml_path, - mock_data_path, - print, - render, - render_dir, - quiet) -} - -async fn sub_pkg_install(ui: &mut UI, - m: &ArgMatches<'_>, - feature_flags: FeatureFlag) - -> Result<()> { - let url = bldr_url_from_matches(m)?; - let channel = channel_from_matches_or_default(m); - let install_sources = install_sources_from_matches(m)?; - let token = maybe_auth_token(m); - let install_mode = - if feature_flags.contains(FeatureFlag::OFFLINE_INSTALL) && m.is_present("OFFLINE") { - InstallMode::Offline - } else { - InstallMode::default() - }; - - let local_package_usage = if m.is_present("IGNORE_LOCAL") { - LocalPackageUsage::Ignore - } else { - LocalPackageUsage::default() - }; - - let install_hook_mode = if m.is_present("IGNORE_INSTALL_HOOK") { - InstallHookMode::Ignore - } else { - InstallHookMode::default() - }; - - init()?; - - for install_source in install_sources.iter() { - let pkg_install = - common::command::package::install::start(ui, - &url, - &channel, - install_source, - PRODUCT, - VERSION, - &FS_ROOT_PATH, - &cache_artifact_path(Some(FS_ROOT_PATH.as_path())), - token.as_deref(), - &install_mode, - &local_package_usage, - install_hook_mode).await?; - - if let Some(dest_dir) = binlink_dest_dir_from_matches(m) { - let force = m.is_present("FORCE"); - command::pkg::binlink::binlink_all_in_pkg(ui, - pkg_install.ident(), - &dest_dir, - &FS_ROOT_PATH, - force)?; - } - } - Ok(()) -} - -fn sub_pkg_path(m: &ArgMatches<'_>) -> Result<()> { - let ident = required_pkg_ident_from_input(m)?; - command::pkg::path::start(&ident, &FS_ROOT_PATH) -} - -fn sub_pkg_list(m: &ArgMatches<'_>) -> Result<()> { - let listing_type = ListingType::from(m); - - command::pkg::list::start(&listing_type) -} - -fn sub_pkg_provides(m: &ArgMatches<'_>) -> Result<()> { - let filename = required_value_of(m, "FILE"); - - let full_releases = m.is_present("FULL_RELEASES"); - let full_paths = m.is_present("FULL_PATHS"); - - command::pkg::provides::start(filename, &FS_ROOT_PATH, full_releases, full_paths) -} - -async fn sub_pkg_search(m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let search_term = required_value_of(m, "SEARCH_TERM"); - let limit = required_value_of(m, "LIMIT").parse().expect("valid LIMIT"); - let token = maybe_auth_token(m); - command::pkg::search::start(search_term, &url, limit, token.as_deref()).await -} - -fn sub_pkg_sign(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = origin_param_or_env(m)?; - - let src = Path::new(required_value_of(m, "SOURCE")); - let dst = Path::new(required_value_of(m, "DEST")); - - let key_cache = key_cache_from_matches(m)?; - - init()?; - - let key = key_cache.latest_secret_origin_signing_key(&origin)?; - command::pkg::sign::start(ui, 
&key, src, dst) -} - -async fn sub_pkg_bulkupload(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let upload_dir = bulkupload_dir_from_matches(m); - let artifact_path = upload_dir.join("artifacts"); - let key_path = upload_dir.join("keys"); - let key_cache = KeyCache::new(key_path); - key_cache.setup()?; - - let url = bldr_url_from_matches(m)?; - let additional_release_channel = channel_from_matches(m); - let force_upload = m.is_present("FORCE"); - let auto_build = if m.is_present("AUTO_BUILD") { - BuildOnUpload::PackageDefault - } else { - BuildOnUpload::Disable - }; - let auto_create_origins = m.is_present("AUTO_CREATE_ORIGINS"); - let token = auth_token_param_or_env(m)?; - - command::pkg::bulkupload::start(ui, - &url, - &additional_release_channel, - &token, - &artifact_path, - force_upload, - auto_build, - auto_create_origins, - &key_cache).await -} - -async fn sub_pkg_upload(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let key_cache = key_cache_from_matches(m)?; - let url = bldr_url_from_matches(m)?; - - // When packages are uploaded, they *always* go to `unstable`; - // they can optionally get added to another channel, too. - let additional_release_channel = channel_from_matches(m); - - // When packages are uploaded we check if they exist in the db - // before allowing a write to the backend, this bypasses the check - let force_upload = m.is_present("FORCE"); - - let auto_build = if m.is_present("NO_BUILD") { - BuildOnUpload::Disable - } else { - BuildOnUpload::PackageDefault - }; - - let token = auth_token_param_or_env(m)?; - let artifact_paths = m.values_of("HART_FILE").unwrap(); // Required via clap - for artifact_path in artifact_paths.map(Path::new) { - command::pkg::upload::start(ui, - &url, - &additional_release_channel, - &token, - artifact_path, - force_upload, - auto_build, - &key_cache).await?; - } - Ok(()) -} - -async fn sub_pkg_delete(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - let ident = required_pkg_ident_from_input(m)?; - let target = target_from_matches(m)?; - - command::pkg::delete::start(ui, &url, (&ident, target), &token).await?; - - Ok(()) -} - -fn sub_pkg_verify(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let src = Path::new(required_value_of(m, "SOURCE")); - let key_cache = key_cache_from_matches(m)?; - init()?; - - command::pkg::verify::start(ui, src, &key_cache) -} - -fn sub_pkg_header(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let src = Path::new(required_value_of(m, "SOURCE")); - init()?; - - command::pkg::header::start(ui, src) -} - -fn sub_pkg_info(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let src = Path::new(required_value_of(m, "SOURCE")); - let to_json = m.is_present("TO_JSON"); - init()?; - - command::pkg::info::start(ui, src, to_json) -} - -async fn sub_pkg_promote(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let channel = required_channel_from_matches(m); - let token = auth_token_param_or_env(m)?; - let target = target_from_matches(m)?; - let ident = required_pkg_ident_from_input(m)?; - command::pkg::promote::start(ui, &url, (&ident, target), &channel, &token).await -} - -async fn sub_pkg_demote(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let channel = required_channel_from_matches(m); - let token = auth_token_param_or_env(m)?; - let target = target_from_matches(m)?; - let ident = required_pkg_ident_from_input(m)?; - 
command::pkg::demote::start(ui, &url, (&ident, target), &channel, &token).await
-}
-
-async fn sub_pkg_channels(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> {
-    let url = bldr_url_from_matches(m)?;
-    let ident = required_pkg_ident_from_input(m)?;
-    let token = maybe_auth_token(m);
-    let target = target_from_matches(m)?;
-
-    command::pkg::channels::start(ui, &url, (&ident, target), token.as_deref()).await
-}
-
-async fn sub_svc_set(m: &ArgMatches<'_>) -> Result<()> {
-    let remote_sup_addr = remote_sup_from_input(m)?;
-    let remote_sup_addr = SrvClient::ctl_addr(remote_sup_addr.as_ref())?;
-    let service_group = required_value_of(m, "SERVICE_GROUP").parse::<ServiceGroup>()?;
-    let mut ui = ui::ui();
-    let mut validate = sup_proto::ctl::SvcValidateCfg { service_group:
-                                                            Some(service_group.clone().into()),
-                                                        ..Default::default() };
-    let mut buf = Vec::with_capacity(sup_proto::butterfly::MAX_SVC_CFG_SIZE);
-    let cfg_len = match m.value_of("FILE") {
-        Some("-") | None => io::stdin().read_to_end(&mut buf)?,
-        Some(f) => {
-            let mut file = File::open(f)?;
-            file.read_to_end(&mut buf)?
-        }
-    };
-    if cfg_len > sup_proto::butterfly::MAX_SVC_CFG_SIZE {
-        ui.fatal(format!("Configuration too large. Maximum size allowed is {} bytes.",
-                         sup_proto::butterfly::MAX_SVC_CFG_SIZE))?;
-        process::exit(1);
-    }
-    validate.cfg = Some(buf.clone());
-    let key_cache = key_cache_from_matches(m)?;
-
-    let mut set = sup_proto::ctl::SvcSetCfg::default();
-    match (service_group.org(), user_param_or_env(m)) {
-        (Some(_org), Some(username)) => {
-            let user_key = key_cache.latest_user_secret_key(&username)?;
-            let service_key = key_cache.latest_service_public_key(&service_group)?;
-            ui.status(Status::Encrypting,
-                      format!("TOML as {} for {}",
-                              user_key.named_revision(),
-                              service_key.named_revision()))?;
-            set.cfg = Some(user_key.encrypt_for_service(&buf, &service_key)
-                                   .to_string()
-                                   .into_bytes());
-            set.is_encrypted = Some(true);
-        }
-        _ => set.cfg = Some(buf.to_vec()),
-    }
-    set.service_group = Some(service_group.into());
-    set.version = Some(value_t!(m, "VERSION_NUMBER", u64).unwrap());
-    ui.begin(format!("Setting new configuration version {} for {}",
-                     set.version
-                        .as_ref()
-                        .map(ToString::to_string)
-                        .unwrap_or_else(|| "UNKNOWN".to_string()),
-                     set.service_group
-                        .as_ref()
-                        .map(ToString::to_string)
-                        .unwrap_or_else(|| "UNKNOWN".to_string()),))?;
-    ui.status(Status::Creating, "service configuration")?;
-    let mut response = SrvClient::request(Some(&remote_sup_addr), validate).await?;
-    while let Some(message_result) = response.next().await {
-        let reply = message_result?;
-        match reply.message_id() {
-            "NetOk" => (),
-            "NetErr" => {
-                let m = reply.parse::<sup_proto::net::NetErr>()
-                             .map_err(SrvClientError::Decode)?;
-                match ErrCode::try_from(m.code) {
-                    Ok(ErrCode::InvalidPayload) => {
-                        ui.warn(m)?;
-                    }
-                    _ => return Err(SrvClientError::from(m).into()),
-                }
-            }
-            _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()),
-        }
-    }
-    ui.status(Status::Applying, format!("via peer {}", remote_sup_addr))?;
-    let mut response = SrvClient::request(Some(&remote_sup_addr), set).await?;
-    while let Some(message_result) = response.next().await {
-        let reply = message_result?;
-        match reply.message_id() {
-            "NetOk" => (),
-            "NetErr" => {
-                let m = reply.parse::<sup_proto::net::NetErr>()
-                             .map_err(SrvClientError::Decode)?;
-                return Err(SrvClientError::from(m).into());
-            }
-            _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()),
-        }
-    }
-    ui.end("Applied configuration")?;
-    Ok(())
-}
-
-async fn sub_svc_config(m: &ArgMatches<'_>) -> Result<()> {
-    let ident = required_pkg_ident_from_input(m)?;
-    let remote_sup_addr = remote_sup_from_input(m)?;
-    let msg = sup_proto::ctl::SvcGetDefaultCfg { ident: Some(ident.into()), };
-    let mut response = SrvClient::request(remote_sup_addr.as_ref(), msg).await?;
-    while let Some(message_result) = response.next().await {
-        let reply = message_result?;
-        match reply.message_id() {
-            "ServiceCfg" => {
-                reply.parse::<sup_proto::types::ServiceCfg>()
-                     .map_err(SrvClientError::Decode)?;
-            }
-            "NetErr" => {
-                let m = reply.parse::<sup_proto::net::NetErr>()
-                             .map_err(SrvClientError::Decode)?;
-                return Err(SrvClientError::from(m).into());
-            }
-            _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()),
-        }
-    }
-    Ok(())
-}
-
-async fn sub_svc_load(svc_load: SvcLoad) -> Result<()> {
-    let remote_sup_addr = svc_load.remote_sup.clone();
-    let msg = habitat_sup_protocol::ctl::SvcLoad::try_from(svc_load)?;
-    gateway_util::send(remote_sup_addr.inner(), msg).await
-}
-
-async fn sub_svc_bulk_load(svc_bulk_load: SvcBulkLoad) -> Result<()> {
-    let mut errors = HashMap::new();
-    for svc_load in svc::svc_loads_from_paths(&svc_bulk_load.svc_config_paths)? {
-        let ident = svc_load.pkg_ident.clone().pkg_ident();
-        if let Err(e) = sub_svc_load(svc_load).await {
-            errors.insert(ident, e);
-        }
-    }
-    if errors.is_empty() {
-        Ok(())
-    } else {
-        Err(errors.into())
-    }
-}
-
-async fn sub_svc_unload(m: &ArgMatches<'_>) -> Result<()> {
-    let ident = required_pkg_ident_from_input(m)?;
-    let timeout_in_seconds =
-        parse_optional_arg::<ShutdownTimeout>("SHUTDOWN_TIMEOUT", m).map(u32::from);
-    let msg = sup_proto::ctl::SvcUnload { ident: Some(ident.into()),
-                                          timeout_in_seconds };
-    let remote_sup_addr = remote_sup_from_input(m)?;
-    gateway_util::send(remote_sup_addr.as_ref(), msg).await
-}
-
-async fn sub_svc_update(u: hab::cli::hab::svc::Update) -> Result<()> {
-    let ctl_addr = u.remote_sup.clone();
-    let msg: sup_proto::ctl::SvcUpdate = TryFrom::try_from(u)?;
-    gateway_util::send(ctl_addr.inner(), msg).await
-}
-
-async fn sub_svc_start(m: &ArgMatches<'_>) -> Result<()> {
-    let ident = required_pkg_ident_from_input(m)?;
-    let msg = sup_proto::ctl::SvcStart { ident: Some(ident.into()), };
-    let remote_sup_addr = remote_sup_from_input(m)?;
-    gateway_util::send(remote_sup_addr.as_ref(), msg).await
-}
-
-async fn sub_svc_status(pkg_ident: Option<PackageIdent>,
-                        remote_sup: Option<&ResolvedListenCtlAddr>)
-                        -> Result<()> {
-    let msg = sup_proto::ctl::SvcStatus { ident: pkg_ident.map(Into::into), };
-
-    let mut out = TabWriter::new(io::stdout());
-    let mut response = SrvClient::request(remote_sup, msg).await?;
-    // Ensure there is at least one result from the server otherwise produce an error
-    if let Some(message_result) = response.next().await {
-        let reply = message_result?;
-        print_svc_status(&mut out, &reply, true)?;
-    } else {
-        return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into());
-    }
-    while let Some(message_result) = response.next().await {
-        let reply = message_result?;
-        print_svc_status(&mut out, &reply, false)?;
-    }
-    out.flush()?;
-    Ok(())
-}
-
-async fn sub_svc_stop(m: &ArgMatches<'_>) -> Result<()> {
-    let ident = required_pkg_ident_from_input(m)?;
-    let timeout_in_seconds =
-        parse_optional_arg::<ShutdownTimeout>("SHUTDOWN_TIMEOUT", m).map(u32::from);
-    let msg = sup_proto::ctl::SvcStop { ident: Some(ident.into()),
-                                        timeout_in_seconds };
-    let remote_sup_addr = remote_sup_from_input(m)?;
-    gateway_util::send(remote_sup_addr.as_ref(), msg).await
-}
-
-async fn sub_file_put(m: &ArgMatches<'_>) -> Result<()> {
-    let service_group = required_value_of(m, "SERVICE_GROUP").parse::<ServiceGroup>()?;
-    let remote_sup_addr = remote_sup_from_input(m)?;
-    let remote_sup_addr = SrvClient::ctl_addr(remote_sup_addr.as_ref())?;
-    let mut ui = ui::ui();
-    let mut msg = sup_proto::ctl::SvcFilePut::default();
-    let file = Path::new(required_value_of(m, "FILE"));
-    if file.metadata()?.len() > sup_proto::butterfly::MAX_FILE_PUT_SIZE_BYTES as u64 {
-        ui.fatal(format!("File too large. Maximum size allowed is {} bytes.",
-                         sup_proto::butterfly::MAX_FILE_PUT_SIZE_BYTES))?;
-        process::exit(1);
-    };
-    msg.service_group = Some(service_group.clone().into());
-    msg.version = Some(value_t!(m, "VERSION_NUMBER", u64).unwrap());
-    msg.filename = Some(file.file_name().unwrap().to_string_lossy().into_owned());
-    let mut buf = Vec::with_capacity(sup_proto::butterfly::MAX_FILE_PUT_SIZE_BYTES);
-    let key_cache = key_cache_from_matches(m)?;
-
-    ui.begin(format!("Uploading file {} to {} incarnation {}",
-                     file.display(),
-                     msg.version
-                        .as_ref()
-                        .map(ToString::to_string)
-                        .unwrap_or_else(|| "UNKNOWN".to_string()),
-                     msg.service_group
-                        .as_ref()
-                        .map(ToString::to_string)
-                        .unwrap_or_else(|| "UKNOWN".to_string()),))?;
-    ui.status(Status::Creating, "service file")?;
-    File::open(file)?.read_to_end(&mut buf)?;
-    match (service_group.org(), user_param_or_env(m)) {
-        (Some(_org), Some(username)) => {
-            // That Some(_org) bit is really "was an org specified for
-            // this service group?"
-            let user_key = key_cache.latest_user_secret_key(&username)?;
-            let service_key = key_cache.latest_service_public_key(&service_group)?;
-            ui.status(Status::Encrypting,
-                      format!("file as {} for {}",
-                              user_key.named_revision(),
-                              service_key.named_revision()))?;
-            msg.content = Some(user_key.encrypt_for_service(&buf, &service_key)
-                                       .to_string()
-                                       .into_bytes());
-            msg.is_encrypted = Some(true);
-        }
-        _ => msg.content = Some(buf.to_vec()),
-    }
-    ui.status(Status::Applying, format!("via peer {}", remote_sup_addr))
-      .unwrap();
-    let mut response = SrvClient::request(Some(&remote_sup_addr), msg).await?;
-    while let Some(message_result) = response.next().await {
-        let reply = message_result?;
-        match reply.message_id() {
-            "NetOk" => (),
-            "NetErr" => {
-                let m = reply.parse::<sup_proto::net::NetErr>()
-                             .map_err(SrvClientError::Decode)?;
-                match ErrCode::try_from(m.code) {
-                    Ok(ErrCode::InvalidPayload) => {
-                        ui.warn(m)?;
-                    }
-                    _ => return Err(SrvClientError::from(m).into()),
-                }
-            }
-            _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()),
-        }
-    }
-    ui.end("Uploaded file")?;
-    Ok(())
-}
-
-#[cfg(not(target_os = "macos"))]
-async fn sub_sup_depart(member_id: String,
-                        remote_sup: Option<&ResolvedListenCtlAddr>)
-                        -> Result<()> {
-    let remote_sup = SrvClient::ctl_addr(remote_sup)?;
-    let mut ui = ui::ui();
-    let msg = sup_proto::ctl::SupDepart { member_id: Some(member_id), };
-
-    ui.begin(format!("Permanently marking {} as departed",
-                     msg.member_id.as_deref().unwrap_or("UNKNOWN")))
-      .unwrap();
-    ui.status(Status::Applying, format!("via peer {}", remote_sup))
-      .unwrap();
-    let mut response = SrvClient::request(Some(&remote_sup), msg).await?;
-    while let Some(message_result) = response.next().await {
-        let reply = message_result?;
-        match reply.message_id() {
-            "NetOk" => (),
-            "NetErr" => {
-                let m = reply.parse::<sup_proto::net::NetErr>()
-                             .map_err(SrvClientError::Decode)?;
-                return Err(SrvClientError::from(m).into());
-            }
-            _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()),
-        }
-    }
-    ui.end("Departure recorded.")?;
-    Ok(())
-}
-
-#[cfg(not(target_os = "macos"))]
-async fn sub_sup_restart(remote_sup: Option<&ResolvedListenCtlAddr>) -> Result<()> {
-    let remote_sup = SrvClient::ctl_addr(remote_sup)?;
-    let mut ui = ui::ui();
-    let msg = sup_proto::ctl::SupRestart::default();
-
-    ui.begin(format!("Restarting supervisor {}", remote_sup))?;
-    let mut response = SrvClient::request(Some(&remote_sup), msg).await?;
-    while let Some(message_result) = response.next().await {
-        let reply = message_result?;
-        match reply.message_id() {
-            "NetOk" => (),
-            "NetErr" => {
-                let m = reply.parse::<sup_proto::net::NetErr>()
-                             .map_err(SrvClientError::Decode)?;
-                return Err(SrvClientError::from(m).into());
-            }
-            _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()),
-        }
-    }
-    ui.end("Restart recorded.")?;
-    Ok(())
-}
-
-#[cfg(not(target_os = "macos"))]
-fn sub_sup_secret_generate() -> Result<()> {
-    let mut ui = ui::ui();
-    let mut buf = String::new();
-    sup_proto::generate_secret_key(&mut buf);
-    ui.info(buf)?;
-    Ok(())
-}
-
-#[cfg(not(target_os = "macos"))]
-fn sub_sup_secret_generate_key(subject_alternative_name: &DnsName, path: PathBuf) -> Result<()> {
-    Ok(ctl_gateway_tls::generate_self_signed_certificate_and_key(subject_alternative_name, path)
-        .map_err(habitat_core::Error::from)?)
-}
-
-fn sub_supportbundle(ui: &mut UI) -> Result<()> {
-    init()?;
-
-    command::supportbundle::start(ui)
-}
-
-fn sub_ring_key_export(m: &ArgMatches<'_>) -> Result<()> {
-    let ring = required_value_of(m, "RING");
-    let key_cache = key_cache_from_matches(m)?;
-    init()?;
-
-    command::ring::key::export::start(ring, &key_cache)
-}
-
-fn sub_ring_key_generate(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> {
-    let ring = required_value_of(m, "RING");
-    let key_cache = key_cache_from_matches(m)?;
-    init()?;
-
-    command::ring::key::generate::start(ui, ring, &key_cache)
-}
-
-fn sub_ring_key_import(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> {
-    let mut content = String::new();
-    let key_cache = key_cache_from_matches(m)?;
-    init()?;
-    io::stdin().read_to_string(&mut content)?;
-
-    // Trim the content to lose line feeds added by Powershell pipeline
-    command::ring::key::import::start(ui, content.trim(), &key_cache)
-}
-
-fn sub_service_key_generate(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> {
-    let org = org_param_or_env(m)?;
-    let service_group = required_value_of(m, "SERVICE_GROUP").parse()?;
-    let key_cache = key_cache_from_matches(m)?;
-    init()?;
-
-    command::service::key::generate::start(ui, &org, &service_group, &key_cache)
-}
-
-fn sub_user_key_generate(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> {
-    let user = required_value_of(m, "USER");
-    let key_cache = key_cache_from_matches(m)?;
-    init()?;
-
-    command::user::key::generate::start(ui, user, &key_cache)
-}
-
-fn args_after_first(args_to_skip: usize) -> Vec<OsString> {
-    env::args_os().skip(args_to_skip).collect()
-}
-
-/// Check to see if the user has passed in an AUTH_TOKEN param. If not, check the
-/// HAB_AUTH_TOKEN env var. If not, check the CLI config to see if there is a default auth
-/// token set. If that's empty too, then error.
-fn auth_token_param_or_env(m: &ArgMatches<'_>) -> Result<String> {
-    match m.value_of("AUTH_TOKEN") {
-        Some(o) => Ok(o.to_string()),
-        None => {
-            match henv::var(AUTH_TOKEN_ENVVAR) {
-                Ok(v) => Ok(v),
-                Err(_) => {
-                    CliConfig::load()?.auth_token.ok_or_else(|| {
-                        Error::ArgumentError("No auth token \
-                                              specified"
-                                             .into())
-                    })
-                }
-            }
-        }
-    }
-}
-
-/// Check to see if an auth token exists and convert it to a string slice if it does. Unlike
-/// auth_token_param_or_env, it's ok for no auth token to be present here. This is useful for
-/// commands that can optionally take an auth token for operating on private packages.
-fn maybe_auth_token(m: &ArgMatches<'_>) -> Option<String> {
-    match auth_token_param_or_env(m) {
-        Ok(t) => Some(t),
-        Err(_) => None,
-    }
-}
-
-/// Check to see if the user has passed in an ORIGIN param. If not, check the HABITAT_ORIGIN env
-/// var. If not, check the CLI config to see if there is a default origin set. If that's empty too,
-/// then error.
-// TODO (CM): sort out types better... there's a conflict with the CLI
-// Origin in this module
-fn origin_param_or_env(m: &ArgMatches<'_>) -> Result<Origin> {
-    match m.value_of("ORIGIN") {
-        Some(o) => Ok(o.parse()?),
-        None => {
-            match henv::var(ORIGIN_ENVVAR) {
-                Ok(v) => Ok(v.parse()?),
-                Err(_) => {
-                    CliConfig::load()?.origin.ok_or_else(|| {
-                        Error::CryptoCLI("No origin specified".to_string())
-                    })
-                }
-            }
-        }
-    }
-}
-
-/// Check to see if the user has passed in an ORG param.
-/// If not, check the HABITAT_ORG env var. If that's
-/// empty too, then error.
-fn org_param_or_env(m: &ArgMatches<'_>) -> Result<String> {
-    match m.value_of("ORG") {
-        Some(o) => Ok(o.to_string()),
-        None => henv::var(HABITAT_ORG_ENVVAR)
-            .map_err(|_| Error::CryptoCLI("No organization specified".to_string())),
-    }
-}
-
-/// Check to see if the user has passed in a Builder URL param. If not, check the HAB_BLDR_URL env
-/// var. If not, check the CLI config to see if there is a default url set. If that's empty too,
-/// then we'll use the default (https://bldr.habitat.sh).
-fn bldr_url_from_matches(matches: &ArgMatches<'_>) -> Result<String> {
-    match matches.value_of("BLDR_URL") {
-        Some(url) => Ok(url.to_string()),
-        None => {
-            match henv::var(BLDR_URL_ENVVAR) {
-                Ok(v) => Ok(v),
-                Err(_) => {
-                    let config = CliConfig::load()?;
-                    match config.bldr_url {
-                        Some(v) => Ok(v),
-                        None => Ok(default_bldr_url()),
-                    }
-                }
-            }
-        }
-    }
-}
-
-/// Resolve a channel. Taken from the environment or from CLI args, if
-/// given.
-fn channel_from_matches(matches: &ArgMatches<'_>) -> Option<ChannelIdent> {
-    matches.value_of("CHANNEL").map(ChannelIdent::from)
-}
-
-/// Resolve a channel. Taken from the environment or from CLI args. This
-/// should only be called when the argument is required by the CLAP config,
-/// otherwise this would panic.
-fn required_channel_from_matches(matches: &ArgMatches<'_>) -> ChannelIdent {
-    channel_from_matches(matches).unwrap()
-}
-
-/// Resolve a target channel. Taken from the environment or from CLI args. This
-/// should only be called when the argument is required by the CLAP config,
-/// otherwise this would panic.
-fn required_target_channel_from_matches(matches: &ArgMatches<'_>) -> ChannelIdent {
-    matches.value_of("TARGET_CHANNEL")
-           .map(ChannelIdent::from)
-           .expect("TARGET_CHANNEL is a required argument!")
-}
-
-/// Resolve a source channel. Taken from the environment or from CLI args. This
-/// should only be called when the argument is required by the CLAP config,
-/// otherwise this would panic.
-fn required_source_channel_from_matches(matches: &ArgMatches<'_>) -> ChannelIdent {
-    matches.value_of("SOURCE_CHANNEL")
-           .map(ChannelIdent::from)
-           .expect("SOURCE_CHANNEl is a required argument!")
-}
-/// Resolve a channel. Taken from the environment or from CLI args, if
-/// given or return the default channel value.
-fn channel_from_matches_or_default(matches: &ArgMatches<'_>) -> ChannelIdent {
-    channel_from_matches(matches).unwrap_or_else(ChannelIdent::configured_value)
-}
-
-/// Resolve a target. Default to x86_64-linux if none specified
-fn target_from_matches(matches: &ArgMatches<'_>) -> Result<PackageTarget> {
-    matches.value_of("PKG_TARGET")
-           .map(PackageTarget::from_str)
-           .unwrap_or_else(|| Ok(active_target()))
-           .map_err(Error::HabitatCore)
-}
-
-/// Return the path to create our binlinks in, or None if no binlinking should occur
-fn binlink_dest_dir_from_matches(matches: &ArgMatches<'_>) -> Option<PathBuf> {
-    // is_present always returns true since BINLINK_DIR has a default value, so we need to use
-    // occurrences_of to determine whether we actually want to do the binlinking
-    if matches.is_present("BINLINK") || matches.occurrences_of("BINLINK_DIR") > 0 {
-        matches.value_of("BINLINK_DIR").map(PathBuf::from)
+    if cfg!(feature = "v4") {
+        main_v4::main_v4().await
     } else {
-        None
-    }
-}
-
-/// Helper function to determine active package target.
-/// It overrides x86_64-darwin to be x86_64-linux in order
-/// to provide a better user experience (ie, for the 99% case)
-fn active_target() -> PackageTarget {
-    match PackageTarget::active_target() {
-        #[cfg(feature = "supported_targets")]
-        target::X86_64_DARWIN => target::X86_64_LINUX,
-        t => t,
-    }
-}
-
-fn install_sources_from_matches(matches: &ArgMatches<'_>) -> Result<Vec<InstallSource>> {
-    matches
-        .values_of("PKG_IDENT_OR_ARTIFACT")
-        .unwrap() // Required via clap
-        .map(|t| t.parse().map_err(Error::from))
-        .collect()
-}
-
-fn idents_from_matches(matches: &ArgMatches<'_>) -> Result<Vec<PackageIdent>> {
-    match matches.values_of("PKG_IDENT") {
-        Some(ident_strings) => {
-            ident_strings.map(|t| PackageIdent::from_str(t).map_err(Error::from))
-                         .collect()
-        }
-        _ => Ok(Vec::new()), // It's not an error to have no idents on command line
-    }
-}
-
-fn idents_from_file_matches(ui: &mut UI,
-                            matches: &ArgMatches<'_>,
-                            cli_channel: &ChannelIdent,
-                            cli_target: PackageTarget)
-                            -> Result<Vec<PackageSet>> {
-    let mut sources: Vec<PackageSet> = Vec::new();
-
-    if let Some(files) = matches.values_of("PKG_IDENT_FILE") {
-        for f in files {
-            let filename = &f.to_string();
-            if habitat_common::cli::is_toml_file(filename) {
-                let mut package_sets = idents_from_toml_file(ui, filename)?;
-                sources.append(&mut package_sets)
-            } else {
-                let idents_from_file = habitat_common::cli::file_into_idents(filename)?;
-                let package_set = PackageSet { idents: idents_from_file,
-                                               channel: cli_channel.clone(),
-                                               target: cli_target, };
-                sources.push(package_set)
-            }
-        }
-    }
-    Ok(sources)
-}
-
-fn idents_from_toml_file(ui: &mut UI, filename: &str) -> Result<Vec<PackageSet>> {
-    let mut sources: Vec<PackageSet> = Vec::new();
-
-    let file_data = std::fs::read_to_string(filename)?;
-    let toml_data: PackageSetFile =
-        toml::from_str(&file_data).map_err(habitat_common::Error::TomlParser)?;
-
-    // We currently only accept version 1
-    if toml_data.format_version.unwrap_or(1) != 1 {
-        return Err(Error::PackageSetParseError(format!(
-            "format_version invalid, only version 1 allowed ({} provided",
-            toml_data.format_version.unwrap()
-        )));
-    }
-
-    ui.status(Status::Using,
-              format!("File {}, '{}'",
-                      filename,
-                      toml_data.file_descriptor.unwrap_or_default()))?;
-
-    for (target, target_array) in toml_data.targets {
-        for package_set_value in target_array {
-            let channel = package_set_value.channel;
-            let idents: Vec<PackageIdent> = strings_to_idents(&package_set_value.packages)?;
-            let package_set = PackageSet { target,
-                                           channel,
-                                           idents };
-            debug!("Package Set {:?}", package_set);
-            sources.push(package_set)
-        }
-    }
-    Ok(sources)
-}
-
-fn strings_to_idents(strings: &[String]) -> Result<Vec<PackageIdent>> {
-    let ident_or_results: Result<Vec<PackageIdent>> =
-        strings.iter()
-               .map(|s| PackageIdent::from_str(s).map_err(Error::from))
-               .collect();
-    ident_or_results
-}
-
-fn verify_from_matches(matches: &ArgMatches<'_>) -> bool { matches.is_present("VERIFY") }
-fn ignore_missing_seeds_from_matches(matches: &ArgMatches<'_>) -> bool {
-    matches.is_present("IGNORE_MISSING_SEEDS")
-}
-
-fn download_dir_from_matches(matches: &ArgMatches<'_>) -> Option<PathBuf> {
-    matches.value_of("DOWNLOAD_DIRECTORY").map(PathBuf::from)
-}
-
-fn excludes_from_matches(matches: &ArgMatches<'_>) -> Vec<PackageIdent> {
-    matches
-        .values_of("EXCLUDE")
-        .unwrap_or_default()
-        .map(|i| PackageIdent::from_str(i).unwrap()) // unwrap safe as we've validated the input
-        .collect()
-}
-
-fn print_svc_status<T>(out: &mut T,
-                       reply: &SrvMessage,
-                       print_header: bool)
-                       -> result::Result<(), SrvClientError>
-    where T: io::Write
-{
-    let status = match reply.message_id() {
-        "ServiceStatus" => {
-            reply.parse::<sup_proto::types::ServiceStatus>()
-                 .map_err(SrvClientError::Decode)?
-        }
-        "NetOk" => {
-            println!("No services loaded.");
-            return Ok(());
-        }
-        "NetErr" => {
-            let err = reply.parse::<sup_proto::net::NetErr>()
-                           .map_err(SrvClientError::Decode)?;
-            return Err(SrvClientError::from(err));
-        }
-        _ => {
-            warn!("Unexpected status message, {:?}", reply);
-            return Ok(());
-        }
-    };
-    let svc_desired_state = status.desired_state
-                                  .map_or("".to_string(), |s| s.to_string());
-    let (svc_state, svc_pid, svc_elapsed) = {
-        match status.process {
-            Some(process) => {
-                (process.state.to_string(),
-                 process.pid
-                        .map_or_else(|| "".to_string(), |p| p.to_string()),
-                 process.elapsed.unwrap_or_default().to_string())
-            }
-            None => {
-                (ProcessState::default().to_string(), "".to_string(), "".to_string())
-            }
-        }
-    };
-    if print_header {
-        writeln!(out, "{}", STATUS_HEADER.join("\t")).unwrap();
-    }
-    // Composites were removed in 0.75 but people could be
-    // depending on the exact format of this output even if they
-    // never used composites. We don't want to break their tooling
-    // so we hardcode in 'standalone' as it's the only supported
-    // package type
-    //
-    // TODO: Remove this when we have a stable machine-readable alternative
-    // that scripts could depend on
-    writeln!(out,
-             "{}\tstandalone\t{}\t{}\t{}\t{}\t{}",
-             status.ident,
-             DesiredState::from_str(&svc_desired_state)?,
-             ProcessState::from_str(&svc_state)?,
-             svc_elapsed,
-             svc_pid,
-             status.service_group,)?;
-    Ok(())
-}
-
-fn bulkupload_dir_from_matches(matches: &ArgMatches<'_>) -> PathBuf {
-    matches.value_of("UPLOAD_DIRECTORY")
-           .map(PathBuf::from)
-           .expect("CLAP-validated upload dir")
-}
-
-fn remote_sup_from_input(m: &ArgMatches<'_>) -> Result<Option<ResolvedListenCtlAddr>> {
-    Ok(m.value_of("REMOTE_SUP")
-        .map(ResolvedListenCtlAddr::from_str)
-        .transpose()?)
-}
-
-fn required_pkg_ident_from_input(m: &ArgMatches<'_>) -> Result<PackageIdent> {
-    Ok(m.value_of("PKG_IDENT")
-        .expect("PKG_IDENT is a required argument")
-        .parse()?)
-}
-
-/// Check to see if the user has passed in a USER param.
-/// If not, check the HAB_USER env var. If that's
-/// empty too, then return an error.
-fn user_param_or_env(m: &ArgMatches<'_>) -> Option<String> {
-    match m.value_of("USER") {
-        Some(u) => Some(u.to_string()),
-        None => {
-            match env::var(HABITAT_USER_ENVVAR) {
-                Ok(v) => Some(v),
-                Err(_) => None,
-            }
-        }
-    }
-}
-
-/// Helper function to get information about the argument given its name
-fn required_value_of<'a>(matches: &'a ArgMatches<'a>, name: &str) -> &'a str {
-    matches.value_of(name)
-           .unwrap_or_else(|| panic!("{} CLAP required arg missing", name))
-}
-
-#[cfg(test)]
-mod test {
-    use super::*;
-
-    mod binlink_dest_dir_from_matches {
-        use super::*;
-
-        habitat_core::locked_env_var!(HAB_BINLINK_DIR, lock_binlink_env_var);
-
-        #[test]
-        fn no_binlink_arg() {
-            let env_var = lock_binlink_env_var();
-            env_var.unset();
-
-            assert!(dest_dir_from_pkg_install(&["origin/pkg"]).is_none(),
-                    "without a --binlink arg, there should be no BINLINK matches");
-        }
-
-        #[test]
-        fn env_var_but_no_binlink_arg() {
-            let env_var = lock_binlink_env_var();
-            env_var.set("/val/from/env/var");
-
-            assert!(dest_dir_from_pkg_install(&["origin/pkg"]).is_none());
-        }
-
-        #[test]
-        #[should_panic(expected = "Invalid value")]
-        fn env_var_empty() {
-            let env_var = lock_binlink_env_var();
-            env_var.set("");
-
-            dest_dir_from_pkg_install(&["origin/pkg"]);
-        }
-
-        #[test]
-        fn env_var_overrides_binlink_default() {
-            let env_var = lock_binlink_env_var();
-            let env_var_val = "/val/from/env/var";
-            env_var.set(env_var_val);
-
-            assert_ne!(env_var_val, habitat_common::cli::DEFAULT_BINLINK_DIR);
-            assert_eq!(dest_dir_from_pkg_install(&["origin/pkg", "--binlink"]),
-                       Some(env_var_val.into()),
-                       "with a no-value --binlink arg, the env var value should override the \
-                        default");
-        }
-
-        #[test]
-        fn binlink_dir_implies_binlink() {
-            let env_var = lock_binlink_env_var();
-            env_var.unset();
-
-            let arg_val = "/val/from/args";
-            assert_ne!(arg_val, habitat_common::cli::DEFAULT_BINLINK_DIR);
-            assert_eq!(dest_dir_from_pkg_install(&["origin/pkg", "--binlink-dir", arg_val]),
-                       Some(arg_val.into()));
-        }
-
-        #[test]
-        fn arg_val_overrides_default() {
-            let env_var = lock_binlink_env_var();
-            env_var.unset();
-
-            let arg_val = "/val/from/args";
-            assert_ne!(arg_val, habitat_common::cli::DEFAULT_BINLINK_DIR);
-            assert_eq!(dest_dir_from_pkg_install(&["origin/pkg",
-                                                   "--binlink",
-                                                   "--binlink-dir",
-                                                   arg_val]),
-                       Some(arg_val.into()),
-                       "The --binlink value should override the default");
-        }
-
-        #[test]
-        fn arg_val_overrides_env_var() {
-            let env_var = lock_binlink_env_var();
-            let env_var_val = "/val/from/env/var";
-            env_var.set(env_var_val);
-            assert_ne!(env_var_val, habitat_common::cli::DEFAULT_BINLINK_DIR);
-
-            let arg_val = "/val/from/args";
-            assert_ne!(arg_val, habitat_common::cli::DEFAULT_BINLINK_DIR);
-
-            assert_eq!(dest_dir_from_pkg_install(&["origin/pkg",
-                                                   "--binlink",
-                                                   "--binlink-dir",
-                                                   arg_val]),
-                       Some(arg_val.into()),
-                       "The --binlink value should override the env var value");
-        }
-
-        #[test]
-        fn binlink_before_pkg_ident_ok() {
-            let env_var = lock_binlink_env_var();
-            env_var.unset();
-
-            assert_eq!(dest_dir_from_pkg_install(&["--binlink", "origin/pkg"]),
-                       Some(habitat_common::cli::DEFAULT_BINLINK_DIR.into()));
-        }
-
-        #[test]
-        fn binlink_before_pkg_ident_with_env_var_ok() {
-            let env_var = lock_binlink_env_var();
-            let env_var_val = "/val/from/env/var";
-            env_var.set(env_var_val);
-            assert_ne!(env_var_val, habitat_common::cli::DEFAULT_BINLINK_DIR);
-
-            assert_eq!(dest_dir_from_pkg_install(&["--binlink", "origin/pkg"]),
-                       Some(env_var_val.into()));
-        }
-
-        fn matches_for_pkg_install<'a>(pkg_install_args: &'a [&'a str]) -> ArgMatches<'a> {
-            let pre_pkg_install_args = &["hab", "pkg", "install"];
-            let app_matches = cli::get(FeatureFlag::empty())
-                .get_matches_from_safe(pre_pkg_install_args.iter().chain(pkg_install_args.iter()))
-                .unwrap(); // Force panics on CLAP errors, so we can use #[should_panic]
-            match app_matches.subcommand() {
-                ("pkg", Some(matches)) => {
-                    match matches.subcommand() {
-                        ("install", Some(m)) => {
-                            println!("{:#?}", m);
-                            m.clone()
-                        }
-                        _ => unreachable!(),
-                    }
-                }
-                _ => unreachable!(),
-            }
-        }
-
-        fn dest_dir_from_pkg_install(pkg_install_args: &[&str]) -> Option<PathBuf> {
-            let pkg_install_matches = &matches_for_pkg_install(pkg_install_args);
-            binlink_dest_dir_from_matches(pkg_install_matches)
-        }
+        #[cfg(feature = "v2")]
+        main_v2::main_v2().await
     }
 }
diff --git a/components/hab/src/main_v2.rs b/components/hab/src/main_v2.rs
new file mode 100755
index 0000000000..0313e2c931
--- /dev/null
+++ b/components/hab/src/main_v2.rs
@@ -0,0 +1,2174 @@
+use clap::{value_t,
+           ArgMatches,
+           ErrorKind as ClapErrorKind,
+           Shell};
+use configopt::{ConfigOpt,
+                Error as ConfigOptError};
+use futures::stream::StreamExt;
+#[cfg(any(all(target_os = "linux",
+              any(target_arch = "x86_64", target_arch = "aarch64")),
+          all(target_os = "windows", target_arch = "x86_64"),))]
+use hab::cli::hab::pkg::ExportCommand as PkgExportCommand;
+use hab::{cli::{self,
+                gateway_util,
+                hab::{license::License,
+                      origin::{Rbac,
+                               RbacSet,
+                               RbacShow},
+                      pkg::PkgExec,
+                      svc::{self,
+                            BulkLoad as SvcBulkLoad,
+                            Load as SvcLoad,
+                            Svc},
+                      util::{bldr_auth_token_from_args_env_or_load,
+                             bldr_url_from_args_env_load_or_default},
+                      Hab,
+                      Origin,
+                      Pkg},
+                parse_optional_arg},
+          command::{self,
+                    pkg::{download::{PackageSet,
+                                     PackageSetFile},
+                          list::ListingType,
+                          uninstall::UninstallHookMode}},
+          error::{Error,
+                  Result},
+          key_type::KeyType,
+          license,
+          scaffolding,
+          AUTH_TOKEN_ENVVAR,
+          BLDR_URL_ENVVAR,
+          ORIGIN_ENVVAR,
+          PRODUCT,
+          VERSION};
+use habitat_api_client::BuildOnUpload;
+use habitat_common::{self as common,
+                     cli::key_cache_from_matches,
+                     cli_config::CliConfig,
+                     command::package::install::{InstallHookMode,
+                                                 InstallMode,
+                                                 InstallSource,
+                                                 LocalPackageUsage},
+                     types::ResolvedListenCtlAddr,
+                     ui::{self,
+                          Status,
+                          UIWriter,
+                          UI},
+                     FeatureFlag};
+use habitat_core::{crypto::{init,
+                            keys::{Key,
+                                   KeyCache}},
+                   env::{self as henv,
+                         Config as _},
+                   fs::{cache_artifact_path,
+                        FS_ROOT_PATH},
+                   os::process::ShutdownTimeout,
+                   package::{target,
+                             PackageIdent,
+                             PackageTarget},
+                   service::ServiceGroup,
+                   url::default_bldr_url,
+                   ChannelIdent};
+use habitat_sup_client::{SrvClient,
+                         SrvClientError};
+use habitat_sup_protocol::{self as sup_proto,
+                           codec::*,
+                           net::ErrCode,
+                           types::*};
+use lazy_static::lazy_static;
+use log::{debug,
+          warn};
+use std::{collections::HashMap,
+          convert::TryFrom,
+          env,
+          ffi::OsString,
+          fs::File,
+          io::{self,
+               prelude::*,
+               Read},
+          path::{Path,
+                 PathBuf},
+          process,
+          result,
+          str::FromStr,
+          string::ToString,
+          thread};
+use tabwriter::TabWriter;
+
+#[cfg(not(target_os = "macos"))]
+use hab::cli::hab::sup::{HabSup,
+                         Secret,
+                         Sup};
+#[cfg(not(target_os = "macos"))]
+use habitat_core::tls::ctl_gateway as ctl_gateway_tls;
+#[cfg(not(target_os = "macos"))]
+use webpki::types::DnsName;
+
+/// Makes the --org CLI param optional when this env var is set
+const HABITAT_ORG_ENVVAR: &str = "HAB_ORG";
+/// Makes the --user CLI param optional when this env var is set
+const HABITAT_USER_ENVVAR: &str = "HAB_USER";
+
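+// Column headers for the `hab svc status` table rendered through `TabWriter`;
+// `print_svc_status` below writes its tab-separated fields in this same order.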
+lazy_static! {
+    static ref STATUS_HEADER: Vec<&'static str> = {
+        vec!["package",
+             "type",
+             "desired",
+             "state",
+             "elapsed (s)",
+             "pid",
+             "group",]
+    };
+}
+
+#[cfg(feature = "v2")]
+pub(crate) async fn main_v2() {
+    env_logger::init();
+    let mut ui = UI::default_with_env();
+    let flags = FeatureFlag::from_env(&mut ui);
+    if let Err(e) = start(&mut ui, flags).await {
+        let exit_code = e.exit_code();
+        ui.fatal(e).unwrap();
+        std::process::exit(exit_code)
+    }
+}
+
+#[allow(clippy::cognitive_complexity)]
+async fn start(ui: &mut UI, feature_flags: FeatureFlag) -> Result<()> {
+    // We parse arguments with configopt in a separate thread to eliminate
+    // possible stack overflow crashes at runtime. OSX or a debug Windows build,
+    // for instance, will crash with our large tree. This is a known issue:
+    // https://github.com/kbknapp/clap-rs/issues/86
+    let child = thread::Builder::new().stack_size(8 * 1024 * 1024)
+                                      .spawn(Hab::try_from_args_with_configopt)
+                                      .unwrap();
+    let hab = child.join().unwrap();
+
+    if let Ok(Hab::License(License::Accept)) = hab {
+        license::accept_license(ui)?;
+        return Ok(());
+    }
+
+    // Allow checking version information and displaying command help without accepting the license.
+    // TODO (DM): To prevent errors in discrepancy between the structopt and cli versions only do
+    // this when the license has not yet been accepted. When we switch fully to structopt this can
+    // be completely removed and we should just call `Hab::from_args_with_configopt` which will
+    // automatically result in this functionality.
+    if !license::check_for_license_acceptance().unwrap_or_default()
+                                               .accepted()
+    {
+        if let Err(ConfigOptError::Clap(e)) = &hab {
+            if e.kind == ClapErrorKind::VersionDisplayed || e.kind == ClapErrorKind::HelpDisplayed {
+                e.exit()
+            }
+        }
+    }
+
+    // We must manually detect a supervisor version check and call the `hab-sup` binary to get the
+    // true Supervisor version.
+    // TODO (DM): This is an ugly consequence of having `hab sup` subcommands handled by both the
+    // `hab` binary and the `hab-sup` binary. Potential fixes:
+    // 1. Handle all `hab sup` subcommands with the `hab-sup` binary
+    // 2. Have a dedicated subcommand for commands handled by the `hab-sup` binary
+    let mut args = env::args();
+    if matches!((args.next().unwrap_or_default().as_str(),
+                 args.next().unwrap_or_default().as_str(),
+                 args.next().unwrap_or_default().as_str()),
+                (_, "sup", "--version") | (_, "sup", "-V"))
+    {
+        return command::sup::start(ui, &args_after_first(2)).await;
+    }
+
+    license::check_for_license_acceptance_and_prompt(ui)?;
+
+    // Parse and handle commands which have been migrated to use `structopt` here. Once everything
+    // is migrated to use `structopt` the parsing logic below this using clap directly will be gone.
+    match hab {
+        Ok(hab) => {
+            match hab {
+                Hab::Origin(Origin::Rbac(action)) => {
+                    match action {
+                        Rbac::Set(rbac_set) => {
+                            return sub_origin_member_role_set(ui, rbac_set).await;
+                        }
+                        Rbac::Show(rbac_show) => {
+                            return sub_origin_member_role_show(ui, rbac_show).await;
+                        }
+                    }
+                }
+                #[cfg(not(target_os = "macos"))]
+                Hab::Run(sup_run) => {
+                    ui.warn("'hab run' as an alias for 'hab sup run' is deprecated. 
Please \ + update your automation and processes accordingly.")?; + return command::launcher::start(ui, sup_run, &args_after_first(1)).await; + } + #[cfg(any(target_os = "macos", + any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), + all(target_os = "windows", target_arch = "x86_64"),)))] + Hab::Studio(studio) => { + return command::studio::enter::start(ui, studio.args()).await; + } + #[cfg(not(target_os = "macos"))] + Hab::Sup(sup) => { + match sup { + HabSup::Sup(sup) => { + // These commands are handled by the `hab-sup` or `hab-launch` binaries. + // We need to pass the subcommand that was issued to the underlying + // binary. It is a bit hacky, but to do that we strip off the `hab sup` + // command prefix and pass the rest of the args to underlying binary. + let args = args_after_first(2); + match sup { + #[cfg(any( + all(target_os = "linux", any(target_arch = "x86_64", target_arch = "aarch64")), + all(target_os = "windows", target_arch = "x86_64"), + ))] + Sup::Bash | Sup::Sh => { + return command::sup::start(ui, &args).await; + } + Sup::Term => { + return command::sup::start(ui, &args).await; + } + Sup::Run(sup_run) => { + return command::launcher::start(ui, sup_run, &args).await; + } + } + } + HabSup::Depart { member_id, + remote_sup, } => { + return sub_sup_depart(member_id, remote_sup.inner()).await; + } + HabSup::Secret(secret) => { + match secret { + Secret::Generate => return sub_sup_secret_generate(), + Secret::GenerateTls { subject_alternative_name, + path, } => { + return sub_sup_secret_generate_key(&subject_alternative_name.dns_name()?, + path) + } + } + } + HabSup::Status { pkg_ident, + remote_sup, } => { + ui.warn("'hab sup status' as an alias for 'hab svc status' is \ + deprecated. Please update your automation and processes \ + accordingly.")?; + return sub_svc_status(pkg_ident, remote_sup.inner()).await; + } + HabSup::Restart { remote_sup } => { + return sub_sup_restart(remote_sup.inner()).await; + } + } + } + Hab::Svc(svc) => { + match svc { + Svc::BulkLoad(svc_bulk_load) => { + if feature_flags.contains(FeatureFlag::SERVICE_CONFIG_FILES) { + return sub_svc_bulk_load(svc_bulk_load).await; + } else { + return Err(Error::ArgumentError(String::from("`hab svc bulkload` is only available when `HAB_FEAT_SERVICE_CONFIG_FILES` is set"))); + } + } + Svc::Load(svc_load) => { + return sub_svc_load(svc_load).await; + } + Svc::Update(svc_update) => return sub_svc_update(svc_update).await, + Svc::Status(svc_status) => { + return sub_svc_status(svc_status.pkg_ident, + svc_status.remote_sup.inner()).await; + } + _ => { + // All other commands will be caught by the CLI parsing logic below. + } + } + } + #[cfg(not(target_os = "macos"))] + Hab::Term => { + ui.warn("'hab term' as an alias for 'hab sup term' is deprecated. 
Please \
+                             update your automation and processes accordingly.")?;
+                    return command::sup::start(ui, &args_after_first(1)).await;
+                }
+                Hab::Pkg(pkg) => {
+                    #[allow(clippy::collapsible_match)]
+                    match pkg {
+                        // package export is not available on platforms that have no package support
+                        #[cfg(any(all(target_os = "linux",
+                                      any(target_arch = "x86_64", target_arch = "aarch64")),
+                                  all(target_os = "windows", target_arch = "x86_64"),))]
+                        Pkg::Export(export) => {
+                            match export {
+                                #[cfg(any(target_os = "linux", target_os = "windows"))]
+                                PkgExportCommand::Container(args) => {
+                                    return command::pkg::export::container::start(ui, &args.args).await;
+                                }
+                                #[cfg(any(target_os = "linux", target_os = "windows"))]
+                                PkgExportCommand::Docker(args) => {
+                                    ui.warn("'hab pkg export docker' is now a deprecated alias \
+                                             for 'hab pkg export container'. Please update your \
+                                             automation and processes accordingly.")?;
+                                    return command::pkg::export::container::start(ui, &args.args).await;
+                                }
+                                #[cfg(any(target_os = "linux", target_os = "windows"))]
+                                PkgExportCommand::Tar(args) => {
+                                    return command::pkg::export::tar::start(ui, &args.args).await;
+                                }
+                            }
+                        }
+                        Pkg::Exec(PkgExec { pkg_ident,
+                                            cmd,
+                                            args, }) => {
+                            return command::pkg::exec::start(&pkg_ident.pkg_ident(),
+                                                             cmd,
+                                                             &args.args);
+                        }
+                        _ => {
+                            // All other commands will be caught by the CLI parsing logic below.
+                        }
+                    }
+                }
+                _ => {
+                    // All other commands will be caught by the CLI parsing logic below.
+                }
+            }
+        }
+        Err(e @ ConfigOptError::ConfigGenerated(_)
+            | e @ ConfigOptError::ConfigFile(..)
+            | e @ ConfigOptError::Toml(..)) => e.exit(),
+        Err(_) => {
+            // Completely ignore all other errors. They will be caught by the CLI parsing logic
+            // below.
+        }
+    };
+
+    // Similar to the configopt parsing above, we build the command tree in a
+    // separate thread to eliminate possible stack overflow crashes at runtime.
+    // See known issue: https://github.com/kbknapp/clap-rs/issues/86
+    let cli_child = thread::Builder::new().stack_size(8 * 1024 * 1024)
+                                          .spawn(move || {
+                                              cli::get(feature_flags).get_matches_safe()
+                                                                     .unwrap_or_else(|e| {
+                                                                         e.exit();
+                                                                     })
+                                          })
+                                          .unwrap();
+    let app_matches = cli_child.join().unwrap();
+
+    match app_matches.subcommand() {
+        ("apply", Some(m)) => {
+            ui.warn("'hab apply' as an alias for 'hab config apply' is deprecated. Please \
+                     update your automation and processes accordingly.")?;
+            sub_svc_set(m).await?
+        }
+        ("cli", Some(matches)) => {
+            match matches.subcommand() {
+                ("setup", Some(m)) => sub_cli_setup(ui, m)?,
+                ("completers", Some(m)) => sub_cli_completers(m, feature_flags),
+                _ => unreachable!(),
+            }
+        }
+        ("config", Some(m)) => {
+            match m.subcommand() {
+                ("apply", Some(m)) => sub_svc_set(m).await?,
+                ("show", Some(m)) => sub_svc_config(m).await?,
+                _ => unreachable!(),
+            }
+        }
+        ("file", Some(m)) => {
+            match m.subcommand() {
+                ("upload", Some(m)) => sub_file_put(m).await?,
+                _ => unreachable!(),
+            }
+        }
+        ("install", Some(m)) => {
+            ui.warn("'hab install' as an alias for 'hab pkg install' is deprecated. Please \
+                     update your automation and processes accordingly.")?;
+            sub_pkg_install(ui, m, feature_flags).await?
+ } + ("origin", Some(matches)) => { + match matches.subcommand() { + ("invitations", Some(m)) => { + match m.subcommand() { + ("accept", Some(sc)) => sub_accept_origin_invitation(ui, sc).await?, + ("ignore", Some(sc)) => sub_ignore_origin_invitation(ui, sc).await?, + ("list", Some(sc)) => sub_list_user_invitations(ui, sc).await?, + ("pending", Some(sc)) => sub_list_pending_origin_invitations(ui, sc).await?, + ("send", Some(sc)) => sub_send_origin_invitation(ui, sc).await?, + ("rescind", Some(sc)) => sub_rescind_origin_invitation(ui, sc).await?, + _ => unreachable!(), + } + } + ("key", Some(m)) => { + match m.subcommand() { + ("download", Some(sc)) => sub_origin_key_download(ui, sc).await?, + ("export", Some(sc)) => sub_origin_key_export(sc)?, + ("generate", Some(sc)) => sub_origin_key_generate(ui, sc)?, + ("import", Some(sc)) => sub_origin_key_import(ui, sc)?, + ("upload", Some(sc)) => sub_origin_key_upload(ui, sc).await?, + _ => unreachable!(), + } + } + ("secret", Some(m)) => { + match m.subcommand() { + ("upload", Some(sc)) => sub_origin_secret_upload(ui, sc).await?, + ("delete", Some(sc)) => sub_origin_secret_delete(ui, sc).await?, + ("list", Some(sc)) => sub_origin_secret_list(ui, sc).await?, + _ => unreachable!(), + } + } + ("create", Some(m)) => sub_origin_create(ui, m).await?, + ("delete", Some(m)) => sub_origin_delete(ui, m).await?, + ("transfer", Some(m)) => sub_origin_transfer_ownership(ui, m).await?, + ("depart", Some(m)) => sub_origin_depart(ui, m).await?, + ("info", Some(m)) => sub_origin_info(ui, m).await?, + _ => unreachable!(), + } + } + ("bldr", Some(matches)) => { + match matches.subcommand() { + ("job", Some(m)) => { + match m.subcommand() { + ("start", Some(m)) => sub_bldr_job_start(ui, m).await?, + ("cancel", Some(m)) => sub_bldr_job_cancel(ui, m).await?, + ("promote", Some(m)) => sub_bldr_job_promote_or_demote(ui, m, true).await?, + ("demote", Some(m)) => sub_bldr_job_promote_or_demote(ui, m, false).await?, + ("status", Some(m)) => sub_bldr_job_status(ui, m).await?, + _ => unreachable!(), + } + } + ("channel", Some(m)) => { + match m.subcommand() { + ("create", Some(m)) => sub_bldr_channel_create(ui, m).await?, + ("destroy", Some(m)) => sub_bldr_channel_destroy(ui, m).await?, + ("list", Some(m)) => sub_bldr_channel_list(ui, m).await?, + ("promote", Some(m)) => sub_bldr_channel_promote(ui, m).await?, + ("demote", Some(m)) => sub_bldr_channel_demote(ui, m).await?, + _ => unreachable!(), + } + } + _ => unreachable!(), + } + } + ("pkg", Some(matches)) => { + match matches.subcommand() { + ("binds", Some(m)) => sub_pkg_binds(m)?, + ("binlink", Some(m)) => sub_pkg_binlink(ui, m)?, + ("build", Some(m)) => sub_pkg_build(ui, m, feature_flags).await?, + ("channels", Some(m)) => sub_pkg_channels(ui, m).await?, + ("config", Some(m)) => sub_pkg_config(m)?, + ("dependencies", Some(m)) => sub_pkg_dependencies(m)?, + ("download", Some(m)) => sub_pkg_download(ui, m, feature_flags).await?, + ("env", Some(m)) => sub_pkg_env(m)?, + ("hash", Some(m)) => sub_pkg_hash(m)?, + ("install", Some(m)) => sub_pkg_install(ui, m, feature_flags).await?, + ("list", Some(m)) => sub_pkg_list(m)?, + ("path", Some(m)) => sub_pkg_path(m)?, + ("provides", Some(m)) => sub_pkg_provides(m)?, + ("search", Some(m)) => sub_pkg_search(m).await?, + ("sign", Some(m)) => sub_pkg_sign(ui, m)?, + ("uninstall", Some(m)) => sub_pkg_uninstall(ui, m).await?, + ("upload", Some(m)) => sub_pkg_upload(ui, m).await?, + ("bulkupload", Some(m)) => sub_pkg_bulkupload(ui, m).await?, + ("delete", Some(m)) => sub_pkg_delete(ui, 
m).await?, + ("verify", Some(m)) => sub_pkg_verify(ui, m)?, + ("header", Some(m)) => sub_pkg_header(ui, m)?, + ("info", Some(m)) => sub_pkg_info(ui, m)?, + ("promote", Some(m)) => sub_pkg_promote(ui, m).await?, + ("demote", Some(m)) => sub_pkg_demote(ui, m).await?, + _ => unreachable!(), + } + } + ("plan", Some(matches)) => { + match matches.subcommand() { + ("init", Some(m)) => sub_plan_init(ui, m)?, + ("render", Some(m)) => sub_plan_render(ui, m)?, + _ => unreachable!(), + } + } + ("ring", Some(matches)) => { + match matches.subcommand() { + ("key", Some(m)) => { + match m.subcommand() { + ("export", Some(sc)) => sub_ring_key_export(sc)?, + ("import", Some(sc)) => sub_ring_key_import(ui, sc)?, + ("generate", Some(sc)) => sub_ring_key_generate(ui, sc)?, + _ => unreachable!(), + } + } + _ => unreachable!(), + } + } + ("svc", Some(matches)) => { + match matches.subcommand() { + ("key", Some(m)) => { + match m.subcommand() { + ("generate", Some(sc)) => sub_service_key_generate(ui, sc)?, + _ => unreachable!(), + } + } + ("unload", Some(m)) => sub_svc_unload(m).await?, + ("start", Some(m)) => sub_svc_start(m).await?, + ("stop", Some(m)) => sub_svc_stop(m).await?, + _ => unreachable!(), + } + } + ("supportbundle", _) => sub_supportbundle(ui)?, + ("setup", Some(m)) => { + ui.warn("'hab setup' as an alias for 'hab cli setup' is deprecated. Please update \ + your automation and processes accordingly.")?; + sub_cli_setup(ui, m)? + } + ("start", Some(m)) => { + ui.warn("'hab start' as an alias for 'hab svc start' is deprecated. Please update \ + your automation and processes accordingly.")?; + sub_svc_start(m).await? + } + ("stop", Some(m)) => { + ui.warn("'hab stop' as an alias for 'hab svc stop' is deprecated. Please update \ + your automation and processes accordingly.")?; + sub_svc_stop(m).await? + } + ("user", Some(matches)) => { + match matches.subcommand() { + ("key", Some(m)) => { + match m.subcommand() { + ("generate", Some(sc)) => sub_user_key_generate(ui, sc)?, + _ => unreachable!(), + } + } + _ => unreachable!(), + } + } + _ => unreachable!(), + }; + Ok(()) +} + +fn sub_cli_setup(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let key_cache = key_cache_from_matches(m)?; + init()?; + + command::cli::setup::start(ui, &key_cache) +} + +fn sub_cli_completers(m: &ArgMatches<'_>, feature_flags: FeatureFlag) { + let shell = m.value_of("SHELL") + .expect("Missing Shell; A shell is required"); + + // TODO (CM): Interesting... 
the completions generated can depend
+    // on what feature flags happen to be enabled at the time you
+    // generated the completions
+    cli::get(feature_flags).gen_completions_to("hab",
+                                               shell.parse::<Shell>().unwrap(),
+                                               &mut io::stdout());
+}
+
+async fn sub_origin_key_download(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> {
+    let origin = required_value_of(m, "ORIGIN").parse()?;
+    let revision = m.value_of("REVISION");
+    let with_secret = m.is_present("WITH_SECRET");
+    let with_encryption = m.is_present("WITH_ENCRYPTION");
+    let token = maybe_auth_token(m);
+    let url = bldr_url_from_matches(m)?;
+    let key_cache = key_cache_from_matches(m)?;
+
+    command::origin::key::download::start(ui,
+                                          &url,
+                                          &origin,
+                                          revision,
+                                          with_secret,
+                                          with_encryption,
+                                          token.as_deref(),
+                                          &key_cache).await
+}
+
+fn sub_origin_key_export(m: &ArgMatches<'_>) -> Result<()> {
+    let origin = required_value_of(m, "ORIGIN").parse()?;
+    let key_type = KeyType::from_str(m.value_of("KEY_TYPE").unwrap_or("public"))?;
+    let key_cache = key_cache_from_matches(m)?;
+    init()?;
+
+    command::origin::key::export::start(&origin, key_type, &key_cache)
+}
+
+fn sub_origin_key_generate(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> {
+    let origin = origin_param_or_env(m)?;
+    let key_cache = key_cache_from_matches(m)?;
+    init()?;
+
+    command::origin::key::generate::start(ui, &origin, &key_cache)
+}
+
+fn sub_origin_key_import(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> {
+    let mut content = String::new();
+    let key_cache = key_cache_from_matches(m)?;
+    init()?;
+    io::stdin().read_to_string(&mut content)?;
+
+    // Trim the content to lose line feeds added by Powershell pipeline
+    command::origin::key::import::start(ui, content.trim(), &key_cache)
+}
+
+async fn sub_origin_key_upload(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> {
+    let url = bldr_url_from_matches(m)?;
+    let token = auth_token_param_or_env(m)?;
+    let key_cache = key_cache_from_matches(m)?;
+
+    init()?;
+
+    match m.value_of("ORIGIN") {
+        Some(origin) => {
+            let origin = origin.parse()?;
+            // you can either specify files, or infer the latest key names
+            let with_secret = m.is_present("WITH_SECRET");
+            command::origin::key::upload_latest::start(ui,
+                                                       &url,
+                                                       &token,
+                                                       &origin,
+                                                       with_secret,
+                                                       &key_cache).await
+        }
+        None => {
+            let keyfile = Path::new(required_value_of(m, "PUBLIC_FILE"));
+            let secret_keyfile = m.value_of("SECRET_FILE").map(Path::new);
+            command::origin::key::upload::start(ui, &url, &token, keyfile, secret_keyfile).await
+        }
+    }
+}
+
+async fn sub_origin_secret_upload(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> {
+    let url = bldr_url_from_matches(m)?;
+    let token = auth_token_param_or_env(m)?;
+    let origin = origin_param_or_env(m)?;
+    let key = required_value_of(m, "KEY_NAME");
+    let secret = required_value_of(m, "SECRET");
+    let key_cache = key_cache_from_matches(m)?;
+    command::origin::secret::upload::start(ui, &url, &token, &origin, key, secret, &key_cache).await
+}
+
+async fn sub_origin_secret_delete(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> {
+    let url = bldr_url_from_matches(m)?;
+    let token = auth_token_param_or_env(m)?;
+    let origin = origin_param_or_env(m)?;
+    let key = required_value_of(m, "KEY_NAME");
+    command::origin::secret::delete::start(ui, &url, &token, &origin, key).await
+}
+
+async fn sub_origin_secret_list(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> {
+    let url = bldr_url_from_matches(m)?;
+    let token = auth_token_param_or_env(m)?;
+    let origin = origin_param_or_env(m)?;
+    command::origin::secret::list::start(ui, &url, 
&token, &origin).await +} + +async fn sub_origin_create(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::create::start(ui, &url, &token, origin).await +} + +async fn sub_origin_info(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + let to_json = m.is_present("TO_JSON"); + command::origin::info::start(ui, &url, &token, origin, to_json).await +} + +async fn sub_origin_delete(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::delete::start(ui, &url, &token, origin).await +} + +async fn sub_origin_transfer_ownership(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let account = required_value_of(m, "NEW_OWNER_ACCOUNT"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::transfer::start(ui, &url, &token, origin, account).await +} + +async fn sub_origin_depart(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::depart::start(ui, &url, &token, origin).await +} + +async fn sub_accept_origin_invitation(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let invitation_id = + required_value_of(m, "INVITATION_ID").parse() + .expect("INVITATION_ID should be valid at this point"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::invitations::accept::start(ui, &url, origin, &token, invitation_id).await +} + +async fn sub_ignore_origin_invitation(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let invitation_id = + required_value_of(m, "INVITATION_ID").parse() + .expect("INVITATION_ID should be valid at this point"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::invitations::ignore::start(ui, &url, origin, &token, invitation_id).await +} + +async fn sub_list_user_invitations(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::invitations::list_user::start(ui, &url, &token).await +} + +async fn sub_list_pending_origin_invitations(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::invitations::list_pending_origin::start(ui, &url, origin, &token).await +} + +async fn sub_rescind_origin_invitation(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let invitation_id = + required_value_of(m, "INVITATION_ID").parse() + .expect("INVITATION_ID should be valid at this point"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::invitations::rescind::start(ui, &url, origin, &token, invitation_id).await +} + +async fn sub_send_origin_invitation(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, 
"ORIGIN"); + let invitee_account = required_value_of(m, "INVITEE_ACCOUNT"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::invitations::send::start(ui, &url, origin, &token, invitee_account).await +} + +async fn sub_origin_member_role_show(ui: &mut UI, r: RbacShow) -> Result<()> { + let bldr_url = bldr_url_from_args_env_load_or_default(r.bldr_url.value)?; + let auth_token = bldr_auth_token_from_args_env_or_load(r.auth_token.value)?; + command::origin::rbac::show_role::start(ui, + bldr_url, + r.origin.inner, + &auth_token, + &r.member_account, + r.to_json).await +} + +async fn sub_origin_member_role_set(ui: &mut UI, r: RbacSet) -> Result<()> { + let bldr_url = bldr_url_from_args_env_load_or_default(r.bldr_url.value)?; + let auth_token = bldr_auth_token_from_args_env_or_load(r.auth_token.value)?; + command::origin::rbac::set_role::start(ui, + bldr_url, + r.origin.inner, + &auth_token, + &r.member_account, + r.role, + r.no_prompt).await +} + +fn sub_pkg_binlink(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let ident = required_pkg_ident_from_input(m)?; + let dest_dir = Path::new(required_value_of(m, "DEST_DIR")); + let force = m.is_present("FORCE"); + match m.value_of("BINARY") { + Some(binary) => { + command::pkg::binlink::start(ui, &ident, binary, dest_dir, &FS_ROOT_PATH, force) + } + None => { + command::pkg::binlink::binlink_all_in_pkg(ui, &ident, dest_dir, &FS_ROOT_PATH, force) + } + } +} + +/// Generate a (possibly empty) list of `Origin`s from the value of +/// the `HAB_ORIGIN_KEYS` environment variable / `--keys` argument. +fn hab_key_origins(m: &ArgMatches<'_>) -> Result> { + m.values_of("HAB_ORIGIN_KEYS") + .unwrap_or_default() + .map(|n| n.parse().map_err(Into::into)) + .collect() +} + +#[allow(unused_variables)] +async fn sub_pkg_build(ui: &mut UI, m: &ArgMatches<'_>, feature_flags: FeatureFlag) -> Result<()> { + let plan_context = required_value_of(m, "PLAN_CONTEXT"); + let root = m.value_of("HAB_STUDIO_ROOT"); + let src = m.value_of("SRC_PATH"); + let refresh_channel = m.value_of("REFRESH_CHANNEL"); + + let origins = hab_key_origins(m)?; + if !origins.is_empty() { + init()?; + let key_cache = key_cache_from_matches(m)?; + for origin in origins.iter() { + // Validate that a secret signing key is present on disk + // for each origin. 
+ key_cache.latest_secret_origin_signing_key(origin)?; + } + } + + #[cfg(target_family = "unix")] + let native_package = if m.is_present("NATIVE_PACKAGE") { + if !feature_flags.contains(FeatureFlag::NATIVE_PACKAGE_SUPPORT) { + return Err(Error::ArgumentError(String::from("`--native-package` is \ + only available when \ + `HAB_FEAT_NATIVE_PACKAGE_SUPPORT` \ + is set"))); + } + true + } else { + false + }; + #[cfg(target_family = "windows")] + let native_package = false; + + let docker = m.is_present("DOCKER"); + let reuse = m.is_present("REUSE"); + + command::pkg::build::start(ui, + plan_context, + root, + src, + &origins, + native_package, + reuse, + docker, + refresh_channel).await +} + +fn sub_pkg_config(m: &ArgMatches<'_>) -> Result<()> { + let ident = required_pkg_ident_from_input(m)?; + common::command::package::config::start(&ident, &*FS_ROOT_PATH)?; + Ok(()) +} + +fn sub_pkg_binds(m: &ArgMatches<'_>) -> Result<()> { + let ident = required_pkg_ident_from_input(m)?; + common::command::package::binds::start(&ident, &*FS_ROOT_PATH)?; + Ok(()) +} + +fn sub_pkg_dependencies(m: &ArgMatches<'_>) -> Result<()> { + let ident = required_pkg_ident_from_input(m)?; + let scope = if m.is_present("TRANSITIVE") { + command::pkg::Scope::PackageAndDependencies + } else { + command::pkg::Scope::Package + }; + + let direction = if m.is_present("REVERSE") { + command::pkg::DependencyRelation::Supports + } else { + command::pkg::DependencyRelation::Requires + }; + command::pkg::dependencies::start(&ident, scope, direction, &FS_ROOT_PATH) +} + +async fn sub_pkg_download(ui: &mut UI, + m: &ArgMatches<'_>, + _feature_flags: FeatureFlag) + -> Result<()> { + let token = maybe_auth_token(m); + let url = bldr_url_from_matches(m)?; + let download_dir = download_dir_from_matches(m); + + // Construct flat file based inputs + let channel = channel_from_matches_or_default(m); + let target = target_from_matches(m)?; + + let install_sources = idents_from_matches(m)?; + + let mut package_sets = vec![PackageSet { target, + channel: channel.clone(), + idents: install_sources }]; + + let mut install_sources_from_file = idents_from_file_matches(ui, m, &channel, target)?; + package_sets.append(&mut install_sources_from_file); + package_sets.retain(|set| !set.idents.is_empty()); + + let verify = verify_from_matches(m); + let ignore_missing_seeds = ignore_missing_seeds_from_matches(m); + + init()?; + + command::pkg::download::start(ui, + &url, + PRODUCT, + VERSION, + &package_sets, + download_dir.as_ref(), + token.as_deref(), + verify, + ignore_missing_seeds).await?; + Ok(()) +} + +fn sub_pkg_env(m: &ArgMatches<'_>) -> Result<()> { + let ident = required_pkg_ident_from_input(m)?; + command::pkg::env::start(&ident, &FS_ROOT_PATH) +} + +fn sub_pkg_hash(m: &ArgMatches<'_>) -> Result<()> { + init()?; + match m.value_of("SOURCE") { + Some(source) => { + // hash single file + command::pkg::hash::start(source) + } + None => { + // read files from stdin + let stdin = io::stdin(); + for line in stdin.lock().lines() { + let file = line?; + command::pkg::hash::start(file.trim_end())?; + } + Ok(()) + } + } +} + +async fn sub_pkg_uninstall(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let ident = required_pkg_ident_from_input(m)?; + let execute_strategy = if m.is_present("DRYRUN") { + command::pkg::ExecutionStrategy::DryRun + } else { + command::pkg::ExecutionStrategy::Run + }; + let mode = command::pkg::uninstall::UninstallMode::from(m); + let scope = if m.is_present("NO_DEPS") { + command::pkg::Scope::Package + } else { + 
command::pkg::Scope::PackageAndDependencies + }; + let excludes = excludes_from_matches(m); + let uninstall_hook_mode = if m.is_present("IGNORE_UNINSTALL_HOOK") { + UninstallHookMode::Ignore + } else { + UninstallHookMode::default() + }; + + command::pkg::uninstall::start(ui, + &ident, + &FS_ROOT_PATH, + execute_strategy, + mode, + scope, + &excludes, + uninstall_hook_mode).await +} + +async fn sub_bldr_channel_create(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let origin = origin_param_or_env(m)?; + let channel = required_channel_from_matches(m); + let token = auth_token_param_or_env(m)?; + command::bldr::channel::create::start(ui, &url, &token, &origin, &channel).await +} + +async fn sub_bldr_channel_destroy(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let origin = origin_param_or_env(m)?; + let channel = required_channel_from_matches(m); + let token = auth_token_param_or_env(m)?; + command::bldr::channel::destroy::start(ui, &url, &token, &origin, &channel).await +} + +async fn sub_bldr_channel_list(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let origin = origin_param_or_env(m)?; + let include_sandbox_channels = m.is_present("SANDBOX"); + command::bldr::channel::list::start(ui, &url, &origin, include_sandbox_channels).await +} + +async fn sub_bldr_channel_promote(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let origin = origin_param_or_env(m)?; + let token = auth_token_param_or_env(m)?; + let source_channel = required_source_channel_from_matches(m); + let target_channel = required_target_channel_from_matches(m); + command::bldr::channel::promote::start(ui, + &url, + &token, + &origin, + &source_channel, + &target_channel).await +} + +async fn sub_bldr_channel_demote(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let origin = origin_param_or_env(m)?; + let token = auth_token_param_or_env(m)?; + let source_channel = required_source_channel_from_matches(m); + let target_channel = required_target_channel_from_matches(m); + command::bldr::channel::demote::start(ui, + &url, + &token, + &origin, + &source_channel, + &target_channel).await +} + +#[allow(unused)] +async fn sub_bldr_job_start(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + Err(Error::BuilderBuildFunctionsRemoved) +} + +#[allow(unused)] +async fn sub_bldr_job_cancel(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + Err(Error::BuilderBuildFunctionsRemoved) +} + +#[allow(unused)] +async fn sub_bldr_job_promote_or_demote(ui: &mut UI, + m: &ArgMatches<'_>, + promote: bool) + -> Result<()> { + Err(Error::BuilderBuildFunctionsRemoved) +} + +#[allow(unused)] +async fn sub_bldr_job_status(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + Err(Error::BuilderBuildFunctionsRemoved) +} + +fn sub_plan_init(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let name = m.value_of("PKG_NAME").map(String::from); + let origin = origin_param_or_env(m)?; + let minimal = m.is_present("MIN"); + let scaffolding_ident = if cfg!(windows) { + match m.value_of("SCAFFOLDING") { + Some(scaffold) => Some(PackageIdent::from_str(scaffold)?), + None => None, + } + } else { + scaffolding::scaffold_check(ui, m.value_of("SCAFFOLDING"))? 
+ }; + + command::plan::init::start(ui, &origin, minimal, scaffolding_ident, name) +} + +fn sub_plan_render(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let template_path = required_value_of(m, "TEMPLATE_PATH"); + let template_path = Path::new(template_path); + + let default_toml_path = required_value_of(m, "DEFAULT_TOML"); + let default_toml_path = Path::new(default_toml_path); + + let user_toml_path = m.value_of("USER_TOML").map(Path::new); + + let mock_data_path = m.value_of("MOCK_DATA").map(Path::new); + + let print = m.is_present("PRINT"); + let render = !m.is_present("NO_RENDER"); + let quiet = m.is_present("QUIET"); + + let render_dir = required_value_of(m, "RENDER_DIR"); + let render_dir = Path::new(render_dir); + + command::plan::render::start(ui, + template_path, + default_toml_path, + user_toml_path, + mock_data_path, + print, + render, + render_dir, + quiet) +} + +async fn sub_pkg_install(ui: &mut UI, + m: &ArgMatches<'_>, + feature_flags: FeatureFlag) + -> Result<()> { + let url = bldr_url_from_matches(m)?; + let channel = channel_from_matches_or_default(m); + let install_sources = install_sources_from_matches(m)?; + let token = maybe_auth_token(m); + let install_mode = + if feature_flags.contains(FeatureFlag::OFFLINE_INSTALL) && m.is_present("OFFLINE") { + InstallMode::Offline + } else { + InstallMode::default() + }; + + let local_package_usage = if m.is_present("IGNORE_LOCAL") { + LocalPackageUsage::Ignore + } else { + LocalPackageUsage::default() + }; + + let install_hook_mode = if m.is_present("IGNORE_INSTALL_HOOK") { + InstallHookMode::Ignore + } else { + InstallHookMode::default() + }; + + init()?; + + for install_source in install_sources.iter() { + let pkg_install = + common::command::package::install::start(ui, + &url, + &channel, + install_source, + PRODUCT, + VERSION, + &FS_ROOT_PATH, + &cache_artifact_path(Some(FS_ROOT_PATH.as_path())), + token.as_deref(), + &install_mode, + &local_package_usage, + install_hook_mode).await?; + + if let Some(dest_dir) = binlink_dest_dir_from_matches(m) { + let force = m.is_present("FORCE"); + command::pkg::binlink::binlink_all_in_pkg(ui, + pkg_install.ident(), + &dest_dir, + &FS_ROOT_PATH, + force)?; + } + } + Ok(()) +} + +fn sub_pkg_path(m: &ArgMatches<'_>) -> Result<()> { + let ident = required_pkg_ident_from_input(m)?; + command::pkg::path::start(&ident, &FS_ROOT_PATH) +} + +fn sub_pkg_list(m: &ArgMatches<'_>) -> Result<()> { + let listing_type = ListingType::from(m); + + command::pkg::list::start(&listing_type) +} + +fn sub_pkg_provides(m: &ArgMatches<'_>) -> Result<()> { + let filename = required_value_of(m, "FILE"); + + let full_releases = m.is_present("FULL_RELEASES"); + let full_paths = m.is_present("FULL_PATHS"); + + command::pkg::provides::start(filename, &FS_ROOT_PATH, full_releases, full_paths) +} + +async fn sub_pkg_search(m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let search_term = required_value_of(m, "SEARCH_TERM"); + let limit = required_value_of(m, "LIMIT").parse().expect("valid LIMIT"); + let token = maybe_auth_token(m); + command::pkg::search::start(search_term, &url, limit, token.as_deref()).await +} + +fn sub_pkg_sign(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = origin_param_or_env(m)?; + + let src = Path::new(required_value_of(m, "SOURCE")); + let dst = Path::new(required_value_of(m, "DEST")); + + let key_cache = key_cache_from_matches(m)?; + + init()?; + + let key = key_cache.latest_secret_origin_signing_key(&origin)?; + command::pkg::sign::start(ui, 
&key, src, dst) +} + +async fn sub_pkg_bulkupload(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let upload_dir = bulkupload_dir_from_matches(m); + let artifact_path = upload_dir.join("artifacts"); + let key_path = upload_dir.join("keys"); + let key_cache = KeyCache::new(key_path); + key_cache.setup()?; + + let url = bldr_url_from_matches(m)?; + let additional_release_channel = channel_from_matches(m); + let force_upload = m.is_present("FORCE"); + let auto_build = if m.is_present("AUTO_BUILD") { + BuildOnUpload::PackageDefault + } else { + BuildOnUpload::Disable + }; + let auto_create_origins = m.is_present("AUTO_CREATE_ORIGINS"); + let token = auth_token_param_or_env(m)?; + + command::pkg::bulkupload::start(ui, + &url, + &additional_release_channel, + &token, + &artifact_path, + force_upload, + auto_build, + auto_create_origins, + &key_cache).await +} + +async fn sub_pkg_upload(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let key_cache = key_cache_from_matches(m)?; + let url = bldr_url_from_matches(m)?; + + // When packages are uploaded, they *always* go to `unstable`; + // they can optionally get added to another channel, too. + let additional_release_channel = channel_from_matches(m); + + // When packages are uploaded we check if they exist in the db + // before allowing a write to the backend, this bypasses the check + let force_upload = m.is_present("FORCE"); + + let auto_build = if m.is_present("NO_BUILD") { + BuildOnUpload::Disable + } else { + BuildOnUpload::PackageDefault + }; + + let token = auth_token_param_or_env(m)?; + let artifact_paths = m.values_of("HART_FILE").unwrap(); // Required via clap + for artifact_path in artifact_paths.map(Path::new) { + command::pkg::upload::start(ui, + &url, + &additional_release_channel, + &token, + artifact_path, + force_upload, + auto_build, + &key_cache).await?; + } + Ok(()) +} + +async fn sub_pkg_delete(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + let ident = required_pkg_ident_from_input(m)?; + let target = target_from_matches(m)?; + + command::pkg::delete::start(ui, &url, (&ident, target), &token).await?; + + Ok(()) +} + +fn sub_pkg_verify(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let src = Path::new(required_value_of(m, "SOURCE")); + let key_cache = key_cache_from_matches(m)?; + init()?; + + command::pkg::verify::start(ui, src, &key_cache) +} + +fn sub_pkg_header(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let src = Path::new(required_value_of(m, "SOURCE")); + init()?; + + command::pkg::header::start(ui, src) +} + +fn sub_pkg_info(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let src = Path::new(required_value_of(m, "SOURCE")); + let to_json = m.is_present("TO_JSON"); + init()?; + + command::pkg::info::start(ui, src, to_json) +} + +async fn sub_pkg_promote(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let channel = required_channel_from_matches(m); + let token = auth_token_param_or_env(m)?; + let target = target_from_matches(m)?; + let ident = required_pkg_ident_from_input(m)?; + command::pkg::promote::start(ui, &url, (&ident, target), &channel, &token).await +} + +async fn sub_pkg_demote(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let channel = required_channel_from_matches(m); + let token = auth_token_param_or_env(m)?; + let target = target_from_matches(m)?; + let ident = required_pkg_ident_from_input(m)?; + 
command::pkg::demote::start(ui, &url, (&ident, target), &channel, &token).await
+}
+
+async fn sub_pkg_channels(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> {
+ let url = bldr_url_from_matches(m)?;
+ let ident = required_pkg_ident_from_input(m)?;
+ let token = maybe_auth_token(m);
+ let target = target_from_matches(m)?;
+
+ command::pkg::channels::start(ui, &url, (&ident, target), token.as_deref()).await
+}
+
+async fn sub_svc_set(m: &ArgMatches<'_>) -> Result<()> {
+ let remote_sup_addr = remote_sup_from_input(m)?;
+ let remote_sup_addr = SrvClient::ctl_addr(remote_sup_addr.as_ref())?;
+ let service_group = required_value_of(m, "SERVICE_GROUP").parse::<ServiceGroup>()?;
+ let mut ui = ui::ui();
+ let mut validate = sup_proto::ctl::SvcValidateCfg { service_group:
+ Some(service_group.clone().into()),
+ ..Default::default() };
+ let mut buf = Vec::with_capacity(sup_proto::butterfly::MAX_SVC_CFG_SIZE);
+ let cfg_len = match m.value_of("FILE") {
+ Some("-") | None => io::stdin().read_to_end(&mut buf)?,
+ Some(f) => {
+ let mut file = File::open(f)?;
+ file.read_to_end(&mut buf)?
+ }
+ };
+ if cfg_len > sup_proto::butterfly::MAX_SVC_CFG_SIZE {
+ ui.fatal(format!("Configuration too large. Maximum size allowed is {} bytes.",
+ sup_proto::butterfly::MAX_SVC_CFG_SIZE))?;
+ process::exit(1);
+ }
+ validate.cfg = Some(buf.clone());
+ let key_cache = key_cache_from_matches(m)?;
+
+ let mut set = sup_proto::ctl::SvcSetCfg::default();
+ match (service_group.org(), user_param_or_env(m)) {
+ (Some(_org), Some(username)) => {
+ let user_key = key_cache.latest_user_secret_key(&username)?;
+ let service_key = key_cache.latest_service_public_key(&service_group)?;
+ ui.status(Status::Encrypting,
+ format!("TOML as {} for {}",
+ user_key.named_revision(),
+ service_key.named_revision()))?;
+ set.cfg = Some(user_key.encrypt_for_service(&buf, &service_key)
+ .to_string()
+ .into_bytes());
+ set.is_encrypted = Some(true);
+ }
+ _ => set.cfg = Some(buf.to_vec()),
+ }
+ set.service_group = Some(service_group.into());
+ set.version = Some(value_t!(m, "VERSION_NUMBER", u64).unwrap());
+ ui.begin(format!("Setting new configuration version {} for {}",
+ set.version
+ .as_ref()
+ .map(ToString::to_string)
+ .unwrap_or_else(|| "UNKNOWN".to_string()),
+ set.service_group
+ .as_ref()
+ .map(ToString::to_string)
+ .unwrap_or_else(|| "UNKNOWN".to_string()),))?;
+ ui.status(Status::Creating, "service configuration")?;
+ let mut response = SrvClient::request(Some(&remote_sup_addr), validate).await?;
+ while let Some(message_result) = response.next().await {
+ let reply = message_result?;
+ match reply.message_id() {
+ "NetOk" => (),
+ "NetErr" => {
+ let m = reply.parse::<sup_proto::net::NetErr>()
+ .map_err(SrvClientError::Decode)?;
+ match ErrCode::try_from(m.code) {
+ Ok(ErrCode::InvalidPayload) => {
+ ui.warn(m)?;
+ }
+ _ => return Err(SrvClientError::from(m).into()),
+ }
+ }
+ _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()),
+ }
+ }
+ ui.status(Status::Applying, format!("via peer {}", remote_sup_addr))?;
+ let mut response = SrvClient::request(Some(&remote_sup_addr), set).await?;
+ while let Some(message_result) = response.next().await {
+ let reply = message_result?;
+ match reply.message_id() {
+ "NetOk" => (),
+ "NetErr" => {
+ let m = reply.parse::<sup_proto::net::NetErr>()
+ .map_err(SrvClientError::Decode)?;
+ return Err(SrvClientError::from(m).into());
+ }
+ _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()),
+ }
+ }
+ ui.end("Applied configuration")?;
+ Ok(())
+}
+
+async fn
sub_svc_config(m: &ArgMatches<'_>) -> Result<()> {
+ let ident = required_pkg_ident_from_input(m)?;
+ let remote_sup_addr = remote_sup_from_input(m)?;
+ let msg = sup_proto::ctl::SvcGetDefaultCfg { ident: Some(ident.into()), };
+ let mut response = SrvClient::request(remote_sup_addr.as_ref(), msg).await?;
+ while let Some(message_result) = response.next().await {
+ let reply = message_result?;
+ match reply.message_id() {
+ "ServiceCfg" => {
+ reply.parse::<sup_proto::types::ServiceCfg>()
+ .map_err(SrvClientError::Decode)?;
+ }
+ "NetErr" => {
+ let m = reply.parse::<sup_proto::net::NetErr>()
+ .map_err(SrvClientError::Decode)?;
+ return Err(SrvClientError::from(m).into());
+ }
+ _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()),
+ }
+ }
+ Ok(())
+}
+
+async fn sub_svc_load(svc_load: SvcLoad) -> Result<()> {
+ let remote_sup_addr = svc_load.remote_sup.clone();
+ let msg = habitat_sup_protocol::ctl::SvcLoad::try_from(svc_load)?;
+ gateway_util::send(remote_sup_addr.inner(), msg).await
+}
+
+async fn sub_svc_bulk_load(svc_bulk_load: SvcBulkLoad) -> Result<()> {
+ let mut errors = HashMap::new();
+ for svc_load in svc::svc_loads_from_paths(&svc_bulk_load.svc_config_paths)? {
+ let ident = svc_load.pkg_ident.clone().pkg_ident();
+ if let Err(e) = sub_svc_load(svc_load).await {
+ errors.insert(ident, e);
+ }
+ }
+ if errors.is_empty() {
+ Ok(())
+ } else {
+ Err(errors.into())
+ }
+}
+
+async fn sub_svc_unload(m: &ArgMatches<'_>) -> Result<()> {
+ let ident = required_pkg_ident_from_input(m)?;
+ let timeout_in_seconds =
+ parse_optional_arg::<ShutdownTimeout>("SHUTDOWN_TIMEOUT", m).map(u32::from);
+ let msg = sup_proto::ctl::SvcUnload { ident: Some(ident.into()),
+ timeout_in_seconds };
+ let remote_sup_addr = remote_sup_from_input(m)?;
+ gateway_util::send(remote_sup_addr.as_ref(), msg).await
+}
+
+async fn sub_svc_update(u: hab::cli::hab::svc::Update) -> Result<()> {
+ let ctl_addr = u.remote_sup.clone();
+ let msg: sup_proto::ctl::SvcUpdate = TryFrom::try_from(u)?;
+ gateway_util::send(ctl_addr.inner(), msg).await
+}
+
+async fn sub_svc_start(m: &ArgMatches<'_>) -> Result<()> {
+ let ident = required_pkg_ident_from_input(m)?;
+ let msg = sup_proto::ctl::SvcStart { ident: Some(ident.into()), };
+ let remote_sup_addr = remote_sup_from_input(m)?;
+ gateway_util::send(remote_sup_addr.as_ref(), msg).await
+}
+
+async fn sub_svc_status(pkg_ident: Option<PackageIdent>,
+ remote_sup: Option<&ResolvedListenCtlAddr>)
+ -> Result<()> {
+ let msg = sup_proto::ctl::SvcStatus { ident: pkg_ident.map(Into::into), };
+
+ let mut out = TabWriter::new(io::stdout());
+ let mut response = SrvClient::request(remote_sup, msg).await?;
+ // Ensure there is at least one result from the server otherwise produce an error
+ if let Some(message_result) = response.next().await {
+ let reply = message_result?;
+ print_svc_status(&mut out, &reply, true)?;
+ } else {
+ return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into());
+ }
+ while let Some(message_result) = response.next().await {
+ let reply = message_result?;
+ print_svc_status(&mut out, &reply, false)?;
+ }
+ out.flush()?;
+ Ok(())
+}
+
+async fn sub_svc_stop(m: &ArgMatches<'_>) -> Result<()> {
+ let ident = required_pkg_ident_from_input(m)?;
+ let timeout_in_seconds =
+ parse_optional_arg::<ShutdownTimeout>("SHUTDOWN_TIMEOUT", m).map(u32::from);
+ let msg = sup_proto::ctl::SvcStop { ident: Some(ident.into()),
+ timeout_in_seconds };
+ let remote_sup_addr = remote_sup_from_input(m)?;
+ gateway_util::send(remote_sup_addr.as_ref(), msg).await
+}
+
+async fn sub_file_put(m: &ArgMatches<'_>) ->
Result<()> {
+ let service_group = required_value_of(m, "SERVICE_GROUP").parse::<ServiceGroup>()?;
+ let remote_sup_addr = remote_sup_from_input(m)?;
+ let remote_sup_addr = SrvClient::ctl_addr(remote_sup_addr.as_ref())?;
+ let mut ui = ui::ui();
+ let mut msg = sup_proto::ctl::SvcFilePut::default();
+ let file = Path::new(required_value_of(m, "FILE"));
+ if file.metadata()?.len() > sup_proto::butterfly::MAX_FILE_PUT_SIZE_BYTES as u64 {
+ ui.fatal(format!("File too large. Maximum size allowed is {} bytes.",
+ sup_proto::butterfly::MAX_FILE_PUT_SIZE_BYTES))?;
+ process::exit(1);
+ };
+ msg.service_group = Some(service_group.clone().into());
+ msg.version = Some(value_t!(m, "VERSION_NUMBER", u64).unwrap());
+ msg.filename = Some(file.file_name().unwrap().to_string_lossy().into_owned());
+ let mut buf = Vec::with_capacity(sup_proto::butterfly::MAX_FILE_PUT_SIZE_BYTES);
+ let key_cache = key_cache_from_matches(m)?;
+
+ ui.begin(format!("Uploading file {} to {} incarnation {}",
+ file.display(),
+ msg.version
+ .as_ref()
+ .map(ToString::to_string)
+ .unwrap_or_else(|| "UNKNOWN".to_string()),
+ msg.service_group
+ .as_ref()
+ .map(ToString::to_string)
+ .unwrap_or_else(|| "UNKNOWN".to_string()),))?;
+ ui.status(Status::Creating, "service file")?;
+ File::open(file)?.read_to_end(&mut buf)?;
+ match (service_group.org(), user_param_or_env(m)) {
+ (Some(_org), Some(username)) => {
+ // That Some(_org) bit is really "was an org specified for
+ // this service group?"
+ let user_key = key_cache.latest_user_secret_key(&username)?;
+ let service_key = key_cache.latest_service_public_key(&service_group)?;
+ ui.status(Status::Encrypting,
+ format!("file as {} for {}",
+ user_key.named_revision(),
+ service_key.named_revision()))?;
+ msg.content = Some(user_key.encrypt_for_service(&buf, &service_key)
+ .to_string()
+ .into_bytes());
+ msg.is_encrypted = Some(true);
+ }
+ _ => msg.content = Some(buf.to_vec()),
+ }
+ ui.status(Status::Applying, format!("via peer {}", remote_sup_addr))
+ .unwrap();
+ let mut response = SrvClient::request(Some(&remote_sup_addr), msg).await?;
+ while let Some(message_result) = response.next().await {
+ let reply = message_result?;
+ match reply.message_id() {
+ "NetOk" => (),
+ "NetErr" => {
+ let m = reply.parse::<sup_proto::net::NetErr>()
+ .map_err(SrvClientError::Decode)?;
+ match ErrCode::try_from(m.code) {
+ Ok(ErrCode::InvalidPayload) => {
+ ui.warn(m)?;
+ }
+ _ => return Err(SrvClientError::from(m).into()),
+ }
+ }
+ _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()),
+ }
+ }
+ ui.end("Uploaded file")?;
+ Ok(())
+}
+
+#[cfg(not(target_os = "macos"))]
+async fn sub_sup_depart(member_id: String,
+ remote_sup: Option<&ResolvedListenCtlAddr>)
+ -> Result<()> {
+ let remote_sup = SrvClient::ctl_addr(remote_sup)?;
+ let mut ui = ui::ui();
+ let msg = sup_proto::ctl::SupDepart { member_id: Some(member_id), };
+
+ ui.begin(format!("Permanently marking {} as departed",
+ msg.member_id.as_deref().unwrap_or("UNKNOWN")))
+ .unwrap();
+ ui.status(Status::Applying, format!("via peer {}", remote_sup))
+ .unwrap();
+ let mut response = SrvClient::request(Some(&remote_sup), msg).await?;
+ while let Some(message_result) = response.next().await {
+ let reply = message_result?;
+ match reply.message_id() {
+ "NetOk" => (),
+ "NetErr" => {
+ let m = reply.parse::<sup_proto::net::NetErr>()
+ .map_err(SrvClientError::Decode)?;
+ return Err(SrvClientError::from(m).into());
+ }
+ _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()),
+ }
+ }
+ ui.end("Departure recorded.")?;
+
Ok(())
+}
+
+#[cfg(not(target_os = "macos"))]
+async fn sub_sup_restart(remote_sup: Option<&ResolvedListenCtlAddr>) -> Result<()> {
+ let remote_sup = SrvClient::ctl_addr(remote_sup)?;
+ let mut ui = ui::ui();
+ let msg = sup_proto::ctl::SupRestart::default();
+
+ ui.begin(format!("Restarting supervisor {}", remote_sup))?;
+ let mut response = SrvClient::request(Some(&remote_sup), msg).await?;
+ while let Some(message_result) = response.next().await {
+ let reply = message_result?;
+ match reply.message_id() {
+ "NetOk" => (),
+ "NetErr" => {
+ let m = reply.parse::<sup_proto::net::NetErr>()
+ .map_err(SrvClientError::Decode)?;
+ return Err(SrvClientError::from(m).into());
+ }
+ _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()),
+ }
+ }
+ ui.end("Restart recorded.")?;
+ Ok(())
+}
+
+#[cfg(not(target_os = "macos"))]
+fn sub_sup_secret_generate() -> Result<()> {
+ let mut ui = ui::ui();
+ let mut buf = String::new();
+ sup_proto::generate_secret_key(&mut buf);
+ ui.info(buf)?;
+ Ok(())
+}
+
+#[cfg(not(target_os = "macos"))]
+fn sub_sup_secret_generate_key(subject_alternative_name: &DnsName, path: PathBuf) -> Result<()> {
+ Ok(ctl_gateway_tls::generate_self_signed_certificate_and_key(subject_alternative_name, path)
+ .map_err(habitat_core::Error::from)?)
+}
+
+fn sub_supportbundle(ui: &mut UI) -> Result<()> {
+ init()?;
+
+ command::supportbundle::start(ui)
+}
+
+fn sub_ring_key_export(m: &ArgMatches<'_>) -> Result<()> {
+ let ring = required_value_of(m, "RING");
+ let key_cache = key_cache_from_matches(m)?;
+ init()?;
+
+ command::ring::key::export::start(ring, &key_cache)
+}
+
+fn sub_ring_key_generate(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> {
+ let ring = required_value_of(m, "RING");
+ let key_cache = key_cache_from_matches(m)?;
+ init()?;
+
+ command::ring::key::generate::start(ui, ring, &key_cache)
+}
+
+fn sub_ring_key_import(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> {
+ let mut content = String::new();
+ let key_cache = key_cache_from_matches(m)?;
+ init()?;
+ io::stdin().read_to_string(&mut content)?;
+
+ // Trim the content to lose line feeds added by Powershell pipeline
+ command::ring::key::import::start(ui, content.trim(), &key_cache)
+}
+
+fn sub_service_key_generate(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> {
+ let org = org_param_or_env(m)?;
+ let service_group = required_value_of(m, "SERVICE_GROUP").parse()?;
+ let key_cache = key_cache_from_matches(m)?;
+ init()?;
+
+ command::service::key::generate::start(ui, &org, &service_group, &key_cache)
+}
+
+fn sub_user_key_generate(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> {
+ let user = required_value_of(m, "USER");
+ let key_cache = key_cache_from_matches(m)?;
+ init()?;
+
+ command::user::key::generate::start(ui, user, &key_cache)
+}
+
+fn args_after_first(args_to_skip: usize) -> Vec<OsString> {
+ env::args_os().skip(args_to_skip).collect()
+}
+
+/// Check to see if the user has passed in an AUTH_TOKEN param. If not, check the
+/// HAB_AUTH_TOKEN env var. If not, check the CLI config to see if there is a default auth
+/// token set. If that's empty too, then error.
+fn auth_token_param_or_env(m: &ArgMatches<'_>) -> Result<String> {
+ match m.value_of("AUTH_TOKEN") {
+ Some(o) => Ok(o.to_string()),
+ None => {
+ match henv::var(AUTH_TOKEN_ENVVAR) {
+ Ok(v) => Ok(v),
+ Err(_) => {
+ CliConfig::load()?.auth_token.ok_or_else(|| {
+ Error::ArgumentError("No auth token \
+ specified"
+ .into())
+ })
+ }
+ }
+ }
+ }
+}
+
+/// Check to see if an auth token exists and convert it to a string slice if it does.
Unlike
+/// auth_token_param_or_env, it's ok for no auth token to be present here. This is useful for
+/// commands that can optionally take an auth token for operating on private packages.
+fn maybe_auth_token(m: &ArgMatches<'_>) -> Option<String> {
+ match auth_token_param_or_env(m) {
+ Ok(t) => Some(t),
+ Err(_) => None,
+ }
+}
+
+/// Check to see if the user has passed in an ORIGIN param. If not, check the HABITAT_ORIGIN env
+/// var. If not, check the CLI config to see if there is a default origin set. If that's empty too,
+/// then error.
+// TODO (CM): sort out types better... there's a conflict with the CLI
+// Origin in this module
+fn origin_param_or_env(m: &ArgMatches<'_>) -> Result<Origin> {
+ match m.value_of("ORIGIN") {
+ Some(o) => Ok(o.parse()?),
+ None => {
+ match henv::var(ORIGIN_ENVVAR) {
+ Ok(v) => Ok(v.parse()?),
+ Err(_) => {
+ CliConfig::load()?.origin.ok_or_else(|| {
+ Error::CryptoCLI("No origin specified".to_string())
+ })
+ }
+ }
+ }
+ }
+}
+
+/// Check to see if the user has passed in an ORG param.
+/// If not, check the HABITAT_ORG env var. If that's
+/// empty too, then error.
+fn org_param_or_env(m: &ArgMatches<'_>) -> Result<String> {
+ match m.value_of("ORG") {
+ Some(o) => Ok(o.to_string()),
+ None => henv::var(HABITAT_ORG_ENVVAR)
+ .map_err(|_| Error::CryptoCLI("No organization specified".to_string())),
+ }
+}
+
+/// Check to see if the user has passed in a Builder URL param. If not, check the HAB_BLDR_URL env
+/// var. If not, check the CLI config to see if there is a default url set. If that's empty too,
+/// then we'll use the default (https://bldr.habitat.sh).
+fn bldr_url_from_matches(matches: &ArgMatches<'_>) -> Result<String> {
+ match matches.value_of("BLDR_URL") {
+ Some(url) => Ok(url.to_string()),
+ None => {
+ match henv::var(BLDR_URL_ENVVAR) {
+ Ok(v) => Ok(v),
+ Err(_) => {
+ let config = CliConfig::load()?;
+ match config.bldr_url {
+ Some(v) => Ok(v),
+ None => Ok(default_bldr_url()),
+ }
+ }
+ }
+ }
+ }
+}
+
+/// Resolve a channel. Taken from the environment or from CLI args, if
+/// given.
+fn channel_from_matches(matches: &ArgMatches<'_>) -> Option<ChannelIdent> {
+ matches.value_of("CHANNEL").map(ChannelIdent::from)
+}
+
+/// Resolve a channel. Taken from the environment or from CLI args. This
+/// should only be called when the argument is required by the CLAP config,
+/// otherwise this would panic.
+fn required_channel_from_matches(matches: &ArgMatches<'_>) -> ChannelIdent {
+ channel_from_matches(matches).unwrap()
+}
+
+/// Resolve a target channel. Taken from the environment or from CLI args. This
+/// should only be called when the argument is required by the CLAP config,
+/// otherwise this would panic.
+fn required_target_channel_from_matches(matches: &ArgMatches<'_>) -> ChannelIdent {
+ matches.value_of("TARGET_CHANNEL")
+ .map(ChannelIdent::from)
+ .expect("TARGET_CHANNEL is a required argument!")
+}
+
+/// Resolve a source channel. Taken from the environment or from CLI args. This
+/// should only be called when the argument is required by the CLAP config,
+/// otherwise this would panic.
+fn required_source_channel_from_matches(matches: &ArgMatches<'_>) -> ChannelIdent {
+ matches.value_of("SOURCE_CHANNEL")
+ .map(ChannelIdent::from)
+ .expect("SOURCE_CHANNEL is a required argument!")
+}
+/// Resolve a channel. Taken from the environment or from CLI args, if
+/// given or return the default channel value.
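The lookup helpers above all follow one resolution order: an explicit CLI argument wins, then the corresponding environment variable, then the CLI config file, then an optional built-in default. A minimal sketch of that chain; `resolve_setting` is a hypothetical name and not part of this diff:

```rust
// Illustrative only: the arg -> env var -> config file -> default chain
// shared by auth_token_param_or_env, origin_param_or_env, and
// bldr_url_from_matches above.
fn resolve_setting(cli_value: Option<&str>,
                   env_var: &str,
                   config_value: Option<String>,
                   default: Option<String>)
                   -> Option<String> {
    cli_value.map(str::to_string)                    // 1. explicit CLI argument
             .or_else(|| std::env::var(env_var).ok()) // 2. environment variable
             .or(config_value)                        // 3. CLI config file
             .or(default)                             // 4. built-in default, if any
}
```

Passing a `Some` default mirrors `bldr_url_from_matches`, while passing `None` mirrors `auth_token_param_or_env`, where the caller turns an empty result into an error.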
+fn channel_from_matches_or_default(matches: &ArgMatches<'_>) -> ChannelIdent {
+ channel_from_matches(matches).unwrap_or_else(ChannelIdent::configured_value)
+}
+
+/// Resolve a target. Default to x86_64-linux if none specified
+fn target_from_matches(matches: &ArgMatches<'_>) -> Result<PackageTarget> {
+ matches.value_of("PKG_TARGET")
+ .map(PackageTarget::from_str)
+ .unwrap_or_else(|| Ok(active_target()))
+ .map_err(Error::HabitatCore)
+}
+
+/// Return the path to create our binlinks in, or None if no binlinking should occur
+fn binlink_dest_dir_from_matches(matches: &ArgMatches<'_>) -> Option<PathBuf> {
+ // is_present always returns true since BINLINK_DIR has a default value, so we need to use
+ // occurrences_of to determine whether we actually want to do the binlinking
+ if matches.is_present("BINLINK") || matches.occurrences_of("BINLINK_DIR") > 0 {
+ matches.value_of("BINLINK_DIR").map(PathBuf::from)
+ } else {
+ None
+ }
+}
+
+/// Helper function to determine active package target.
+/// It overrides x86_64-darwin to be x86_64-linux in order
+/// to provide a better user experience (ie, for the 99% case)
+fn active_target() -> PackageTarget {
+ match PackageTarget::active_target() {
+ #[cfg(feature = "supported_targets")]
+ target::X86_64_DARWIN => target::X86_64_LINUX,
+ t => t,
+ }
+}
+
+fn install_sources_from_matches(matches: &ArgMatches<'_>) -> Result<Vec<InstallSource>> {
+ matches
+ .values_of("PKG_IDENT_OR_ARTIFACT")
+ .unwrap() // Required via clap
+ .map(|t| t.parse().map_err(Error::from))
+ .collect()
+}
+
+fn idents_from_matches(matches: &ArgMatches<'_>) -> Result<Vec<PackageIdent>> {
+ match matches.values_of("PKG_IDENT") {
+ Some(ident_strings) => {
+ ident_strings.map(|t| PackageIdent::from_str(t).map_err(Error::from))
+ .collect()
+ }
+ _ => Ok(Vec::new()), // It's not an error to have no idents on command line
+ }
+}
+
+fn idents_from_file_matches(ui: &mut UI,
+ matches: &ArgMatches<'_>,
+ cli_channel: &ChannelIdent,
+ cli_target: PackageTarget)
+ -> Result<Vec<PackageSet>> {
+ let mut sources: Vec<PackageSet> = Vec::new();
+
+ if let Some(files) = matches.values_of("PKG_IDENT_FILE") {
+ for f in files {
+ let filename = &f.to_string();
+ if habitat_common::cli::is_toml_file(filename) {
+ let mut package_sets = idents_from_toml_file(ui, filename)?;
+ sources.append(&mut package_sets)
+ } else {
+ let idents_from_file = habitat_common::cli::file_into_idents(filename)?;
+ let package_set = PackageSet { idents: idents_from_file,
+ channel: cli_channel.clone(),
+ target: cli_target, };
+ sources.push(package_set)
+ }
+ }
+ }
+ Ok(sources)
+}
+
+fn idents_from_toml_file(ui: &mut UI, filename: &str) -> Result<Vec<PackageSet>> {
+ let mut sources: Vec<PackageSet> = Vec::new();
+
+ let file_data = std::fs::read_to_string(filename)?;
+ let toml_data: PackageSetFile =
+ toml::from_str(&file_data).map_err(habitat_common::Error::TomlParser)?;
+
+ // We currently only accept version 1
+ if toml_data.format_version.unwrap_or(1) != 1 {
+ return Err(Error::PackageSetParseError(format!(
+ "format_version invalid, only version 1 allowed ({} provided)",
+ toml_data.format_version.unwrap()
+ )));
+ }
+
+ ui.status(Status::Using,
+ format!("File {}, '{}'",
+ filename,
+ toml_data.file_descriptor.unwrap_or_default()))?;
+
+ for (target, target_array) in toml_data.targets {
+ for package_set_value in target_array {
+ let channel = package_set_value.channel;
+ let idents: Vec<PackageIdent> = strings_to_idents(&package_set_value.packages)?;
+ let package_set = PackageSet { target,
+ channel,
+ idents };
+ debug!("Package Set {:?}", package_set);
+ sources.push(package_set)
+ }
+ }
+ Ok(sources)
+}
+
+fn
strings_to_idents(strings: &[String]) -> Result<Vec<PackageIdent>> {
+ let ident_or_results: Result<Vec<PackageIdent>> =
+ strings.iter()
+ .map(|s| PackageIdent::from_str(s).map_err(Error::from))
+ .collect();
+ ident_or_results
+}
+
+fn verify_from_matches(matches: &ArgMatches<'_>) -> bool { matches.is_present("VERIFY") }
+fn ignore_missing_seeds_from_matches(matches: &ArgMatches<'_>) -> bool {
+ matches.is_present("IGNORE_MISSING_SEEDS")
+}
+
+fn download_dir_from_matches(matches: &ArgMatches<'_>) -> Option<PathBuf> {
+ matches.value_of("DOWNLOAD_DIRECTORY").map(PathBuf::from)
+}
+
+fn excludes_from_matches(matches: &ArgMatches<'_>) -> Vec<PackageIdent> {
+ matches
+ .values_of("EXCLUDE")
+ .unwrap_or_default()
+ .map(|i| PackageIdent::from_str(i).unwrap()) // unwrap safe as we've validated the input
+ .collect()
+}
+
+fn print_svc_status<T>(out: &mut T,
+ reply: &SrvMessage,
+ print_header: bool)
+ -> result::Result<(), SrvClientError>
+ where T: io::Write
+{
+ let status = match reply.message_id() {
+ "ServiceStatus" => {
+ reply.parse::<habitat_sup_protocol::types::ServiceStatus>()
+ .map_err(SrvClientError::Decode)?
+ }
+ "NetOk" => {
+ println!("No services loaded.");
+ return Ok(());
+ }
+ "NetErr" => {
+ let err = reply.parse::<sup_proto::net::NetErr>()
+ .map_err(SrvClientError::Decode)?;
+ return Err(SrvClientError::from(err));
+ }
+ _ => {
+ warn!("Unexpected status message, {:?}", reply);
+ return Ok(());
+ }
+ };
+ let svc_desired_state = status.desired_state
+ .map_or("<none>".to_string(), |s| s.to_string());
+ let (svc_state, svc_pid, svc_elapsed) = {
+ match status.process {
+ Some(process) => {
+ (process.state.to_string(),
+ process.pid
+ .map_or_else(|| "<none>".to_string(), |p| p.to_string()),
+ process.elapsed.unwrap_or_default().to_string())
+ }
+ None => {
+ (ProcessState::default().to_string(), "<none>".to_string(), "<none>".to_string())
+ }
+ }
+ };
+ if print_header {
+ writeln!(out, "{}", STATUS_HEADER.join("\t")).unwrap();
+ }
+ // Composites were removed in 0.75 but people could be
+ // depending on the exact format of this output even if they
+ // never used composites. We don't want to break their tooling
+ // so we hardcode in 'standalone' as it's the only supported
+ // package type
+ //
+ // TODO: Remove this when we have a stable machine-readable alternative
+ // that scripts could depend on
+ writeln!(out,
+ "{}\tstandalone\t{}\t{}\t{}\t{}\t{}",
+ status.ident,
+ DesiredState::from_str(&svc_desired_state)?,
+ ProcessState::from_str(&svc_state)?,
+ svc_elapsed,
+ svc_pid,
+ status.service_group,)?;
+ Ok(())
+}
+
+fn bulkupload_dir_from_matches(matches: &ArgMatches<'_>) -> PathBuf {
+ matches.value_of("UPLOAD_DIRECTORY")
+ .map(PathBuf::from)
+ .expect("CLAP-validated upload dir")
+}
+
+fn remote_sup_from_input(m: &ArgMatches<'_>) -> Result<Option<ResolvedListenCtlAddr>> {
+ Ok(m.value_of("REMOTE_SUP")
+ .map(ResolvedListenCtlAddr::from_str)
+ .transpose()?)
+}
+
+fn required_pkg_ident_from_input(m: &ArgMatches<'_>) -> Result<PackageIdent> {
+ Ok(m.value_of("PKG_IDENT")
+ .expect("PKG_IDENT is a required argument")
+ .parse()?)
+}
+
+/// Check to see if the user has passed in a USER param.
+/// If not, check the HAB_USER env var. If that's
+/// empty too, then return an error.
+fn user_param_or_env(m: &ArgMatches<'_>) -> Option<String> {
+ match m.value_of("USER") {
+ Some(u) => Some(u.to_string()),
+ None => {
+ match env::var(HABITAT_USER_ENVVAR) {
+ Ok(v) => Some(v),
+ Err(_) => None,
+ }
+ }
+ }
+}
+
+/// Helper function to get information about the argument given its name
+fn required_value_of<'a>(matches: &'a ArgMatches<'a>, name: &str) -> &'a str {
+ matches.value_of(name)
+ .unwrap_or_else(|| panic!("{} CLAP required arg missing", name))
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ mod binlink_dest_dir_from_matches {
+ use super::*;
+
+ habitat_core::locked_env_var!(HAB_BINLINK_DIR, lock_binlink_env_var);
+
+ #[test]
+ fn no_binlink_arg() {
+ let env_var = lock_binlink_env_var();
+ env_var.unset();
+
+ assert!(dest_dir_from_pkg_install(&["origin/pkg"]).is_none(),
+ "without a --binlink arg, there should be no BINLINK matches");
+ }
+
+ #[test]
+ fn env_var_but_no_binlink_arg() {
+ let env_var = lock_binlink_env_var();
+ env_var.set("/val/from/env/var");
+
+ assert!(dest_dir_from_pkg_install(&["origin/pkg"]).is_none());
+ }
+
+ #[test]
+ #[should_panic(expected = "Invalid value")]
+ fn env_var_empty() {
+ let env_var = lock_binlink_env_var();
+ env_var.set("");
+
+ dest_dir_from_pkg_install(&["origin/pkg"]);
+ }
+
+ #[test]
+ fn env_var_overrides_binlink_default() {
+ let env_var = lock_binlink_env_var();
+ let env_var_val = "/val/from/env/var";
+ env_var.set(env_var_val);
+
+ assert_ne!(env_var_val, habitat_common::cli::DEFAULT_BINLINK_DIR);
+ assert_eq!(dest_dir_from_pkg_install(&["origin/pkg", "--binlink"]),
+ Some(env_var_val.into()),
+ "with a no-value --binlink arg, the env var value should override the \
+ default");
+ }
+
+ #[test]
+ fn binlink_dir_implies_binlink() {
+ let env_var = lock_binlink_env_var();
+ env_var.unset();
+
+ let arg_val = "/val/from/args";
+ assert_ne!(arg_val, habitat_common::cli::DEFAULT_BINLINK_DIR);
+ assert_eq!(dest_dir_from_pkg_install(&["origin/pkg", "--binlink-dir", arg_val]),
+ Some(arg_val.into()));
+ }
+
+ #[test]
+ fn arg_val_overrides_default() {
+ let env_var = lock_binlink_env_var();
+ env_var.unset();
+
+ let arg_val = "/val/from/args";
+ assert_ne!(arg_val, habitat_common::cli::DEFAULT_BINLINK_DIR);
+ assert_eq!(dest_dir_from_pkg_install(&["origin/pkg",
+ "--binlink",
+ "--binlink-dir",
+ arg_val]),
+ Some(arg_val.into()),
+ "The --binlink value should override the default");
+ }
+
+ #[test]
+ fn arg_val_overrides_env_var() {
+ let env_var = lock_binlink_env_var();
+ let env_var_val = "/val/from/env/var";
+ env_var.set(env_var_val);
+ assert_ne!(env_var_val, habitat_common::cli::DEFAULT_BINLINK_DIR);
+
+ let arg_val = "/val/from/args";
+ assert_ne!(arg_val, habitat_common::cli::DEFAULT_BINLINK_DIR);
+
+ assert_eq!(dest_dir_from_pkg_install(&["origin/pkg",
+ "--binlink",
+ "--binlink-dir",
+ arg_val]),
+ Some(arg_val.into()),
+ "The --binlink value should override the env var value");
+ }
+
+ #[test]
+ fn binlink_before_pkg_ident_ok() {
+ let env_var = lock_binlink_env_var();
+ env_var.unset();
+
+ assert_eq!(dest_dir_from_pkg_install(&["--binlink", "origin/pkg"]),
+ Some(habitat_common::cli::DEFAULT_BINLINK_DIR.into()));
+ }
+
+ #[test]
+ fn binlink_before_pkg_ident_with_env_var_ok() {
+ let env_var = lock_binlink_env_var();
+ let env_var_val = "/val/from/env/var";
+ env_var.set(env_var_val);
+ assert_ne!(env_var_val, habitat_common::cli::DEFAULT_BINLINK_DIR);
+
+ assert_eq!(dest_dir_from_pkg_install(&["--binlink", "origin/pkg"]),
+ Some(env_var_val.into()));
+ }
+
+ fn
matches_for_pkg_install<'a>(pkg_install_args: &'a [&'a str]) -> ArgMatches<'a> {
+ let pre_pkg_install_args = &["hab", "pkg", "install"];
+ let app_matches = cli::get(FeatureFlag::empty())
+ .get_matches_from_safe(pre_pkg_install_args.iter().chain(pkg_install_args.iter()))
+ .unwrap(); // Force panics on CLAP errors, so we can use #[should_panic]
+ match app_matches.subcommand() {
+ ("pkg", Some(matches)) => {
+ match matches.subcommand() {
+ ("install", Some(m)) => {
+ println!("{:#?}", m);
+ m.clone()
+ }
+ _ => unreachable!(),
+ }
+ }
+ _ => unreachable!(),
+ }
+ }
+
+ fn dest_dir_from_pkg_install(pkg_install_args: &[&str]) -> Option<PathBuf> {
+ let pkg_install_matches = &matches_for_pkg_install(pkg_install_args);
+ binlink_dest_dir_from_matches(pkg_install_matches)
+ }
+ }
+}
diff --git a/components/hab/src/main_v4.rs b/components/hab/src/main_v4.rs
new file mode 100644
index 0000000000..23034fe897
--- /dev/null
+++ b/components/hab/src/main_v4.rs
@@ -0,0 +1,24 @@
+#[cfg(feature = "v4")]
+use habitat_common::{ui::{UIWriter,
+ UI},
+ FeatureFlag};
+
+#[cfg(feature = "v4")]
+use hab::cli_driver;
+
+#[cfg(feature = "v4")]
+pub(crate) async fn main_v4() {
+ let mut ui = UI::default_with_env();
+ let features = FeatureFlag::from_env(&mut ui);
+ if let Err(e) = cli_driver(&mut ui, features).await {
+ let exit_code = e.exit_code();
+ ui.fatal(e).unwrap();
+ std::process::exit(exit_code)
+ }
+}
+
+// Hack required for now to have this compile when v4 is not enabled
+#[cfg(not(feature = "v4"))]
+pub(crate) async fn main_v4() {
+ unreachable!();
+}
diff --git a/components/launcher/habitat/plan.sh b/components/launcher/habitat/plan.sh
index d2f3bd3334..214818f836 100644
--- a/components/launcher/habitat/plan.sh
+++ b/components/launcher/habitat/plan.sh
@@ -30,7 +30,7 @@ do_prepare() {
 # Can be either `--release` or `--debug` to determine cargo build strategy
 build_line "Building artifacts with \`${cargo_build_mode#--}' mode"
- export rustc_target="x86_64-unknown-linux-gnu"
+ export rustc_target="${pkg_target%%-*}-unknown-linux-gnu"
 build_line "Setting rustc_target=$rustc_target"
 # Used by Cargo to use a pristine, isolated directory for all compilation
diff --git a/components/pkg-cfize/README.md b/components/pkg-cfize/README.md
deleted file mode 100644
index 126d292fb1..0000000000
--- a/components/pkg-cfize/README.md
+++ /dev/null
@@ -1,41 +0,0 @@
-# CF Exporter
-
-Cloud Foundry is a Platform as a Service used to run 12 factor apps.
-
-## Exporter
-To export a habitat package to run on Cloud Foundry you can run:
-```
-$ hab pkg export cf <origin>/<package>
-```
-
-This will create 2 docker images. First it will run the docker exporter and then it will create an image based off of that one with additional layers to handle mapping from the Cloud Foundry environment to the Habitat native configuration file. The CF version of the docker image will have `cf-` as a prefix to the image tag.
-
-```
-$ docker images
-starkandwayne/redmine cf-3.4.2-20170921100414 39d89fc95ca6 16 hours ago 553MB
-starkandwayne/redmine 3.4.2-20170921100414 9b9a155ece00 16 hours ago 549MB
-```
-
-## Mapping File
-The mapping file is a toml file that can add Bash interpolated variables and scripts.
The Bash code will have access to:
-- all environment variables
-- the jq binary
-- a few helper methods
-
-cf-mapping.toml
-```
-port = "${PORT}"
-[db]
-user = "$(service "redmine-pg" '.credentials.username')"
-password = "$(service "redmine-pg" '.credentials.password')"
-host = "$(service "redmine-pg" '.credentials.host')"
-name = "$(service "redmine-pg" '.credentials.database')"
-```
-
-## Helpers
-
-The helper methods are designed to extract information from the standard Cloud Foundry environment variables `VCAP_SERVICES` and `VCAP_APPLICATION`.
-
-Helpers:
-- `service <service-name> <jq-expression>` will extract the JSON associated with the given service-name from the `VCAP_SERVICES` environment variable and apply the jq-expression to it.
-- `application <jq-expression>` will apply the jq-expression to the `VCAP_APPLICATION` environment variable
diff --git a/components/pkg-cfize/bin/hab-pkg-cfize.sh b/components/pkg-cfize/bin/hab-pkg-cfize.sh
deleted file mode 100755
index 992907ba02..0000000000
--- a/components/pkg-cfize/bin/hab-pkg-cfize.sh
+++ /dev/null
@@ -1,168 +0,0 @@
-#!/bin/bash
-#
-# # Usage
-#
-# ```
-# $ hab-pkg-cfize [PKG] [MAPPING]
-# ```
-#
-# # Synopsis
-#
-# Create a Docker container from a set of Habitat packages.
-
-# Fail if there are any unset variables and whenever a command returns a
-# non-zero exit code.
-set -eu
-
-# If the variable `$DEBUG` is set, then print the shell commands as we execute.
-if [ -n "${DEBUG:-}" ]; then
- set -x
- export DEBUG
-fi
-
-# ## Help
-
-# **Internal** Prints help
-print_help() {
- echo -- "$program $version
-
-$author
-
-Habitat Package CFize - Create a Cloud Foundry ready Docker image from a given package.
-
-USAGE:
- $program <PKG> <MAPPING>
-"
-}
-
-# **Internal** Exit the program with an error message and a status code.
-#
-# ```sh
-# exit_with "Something bad went down" 55
-# ```
-exit_with() {
- if [ "${HAB_NOCOLORING:-}" = "true" ]; then
- echo -- "ERROR: $1"
- else
- case "${TERM:-}" in
- *term | xterm-* | rxvt | screen | screen-*)
- printf -- "\033[1;31mERROR: \033[1;37m%s\033[0m\n" "$1"
- ;;
- *)
- printf -- "ERROR: %s\n" "$1"
- ;;
- esac
- fi
- exit "$2"
-}
-
-dockerize_tags() {
- local docker_output_file="$1"
- grep tagged "$docker_output_file" | awk '{ print $3}'
-}
-
-sh_shebang() {
- local docker_output_file="$1"
- echo "#!$(grep ENV "$docker_output_file" | \
- tr ":" "\n" | \
- grep busybox-static | \
- head -n1)/sh"
-}
-
-build_cf_image() {
- local hab_package=${1}
- local mapping=${2}
-
- tmp_dir="$(mktemp -t -d "${program}-XXXX")"
-
- dockerize_out="${tmp_dir}/dockerize-out"
- hab-pkg-export-container "${hab_package}" | tee "${dockerize_out}"
-
- docker_tag_array=$(dockerize_tags "${dockerize_out}")
- cf_docker_tag_array=("${docker_tag_array[@]/:/:cf-}")
-
- DOCKER_CONTEXT=${tmp_dir}/docker
- mkdir -p "${DOCKER_CONTEXT}"
- render_helpers > "${DOCKER_CONTEXT}"/helpers.sh
-
- cat <<EOT > "$DOCKER_CONTEXT"/cf-init.sh
-$(sh_shebang "${dockerize_out}")
-source /helpers.sh
-( echo "cat <<EOF >~/user.toml";
- cat /config.toml;
- echo "EOF";
-) >~/render.sh
-.
~/render.sh
-mv ~/user.toml /hab/svc/$(basename "${hab_package}")/user.toml
-exec /init.sh "\$@"
-EOT
- chmod +x "$DOCKER_CONTEXT"/cf-init.sh
-
- cat "${mapping}" > "${DOCKER_CONTEXT}"/config.toml
- cat <<EOT > "$DOCKER_CONTEXT"/Dockerfile
-FROM ${docker_tag_array[0]}
-RUN hab pkg install core/jq-static
-ADD cf-init.sh /
-ADD helpers.sh /
-ADD config.toml /
-ENTRYPOINT ["/cf-init.sh"]
-CMD ["start", "$1"]
-EOT
-
- docker build --force-rm --no-cache "${cf_docker_tag_array[@]/#/-t }" "${DOCKER_CONTEXT}"
- rm -rf "${tmp_dir}"
-}
-
-render_helpers() {
- cat < "$bin"
-#!$(pkg_path_for bash)/bin/bash
-set -e
-if test -n "\$DEBUG"; then set -x; fi
-
-export PATH="$_runtime_path:\${PATH:-}"
-
-exec ${bin}.real \$@
-EOF
- chmod -v 755 "$bin"
-}
diff --git a/components/pkg-export-container/habitat/plan.sh b/components/pkg-export-container/habitat/plan.sh
index 805bcfe311..d9e22a27c1 100644
--- a/components/pkg-export-container/habitat/plan.sh
+++ b/components/pkg-export-container/habitat/plan.sh
@@ -58,7 +58,15 @@ do_before() {
 do_prepare() {
 _common_prepare
- export rustc_target="x86_64-unknown-linux-musl"
+ # With the musl target, the ring crate is looking for aarch64-linux-musl-gcc,
+ # but the core/musl package provides musl-gcc. This workaround is necessary until the appropriate changes are made to core/musl for aarch64.
+ if [[ "${pkg_target%%-*}" == "aarch64" ]]; then
+ if [[ ! -r "$(pkg_path_for musl)/bin/aarch64-linux-musl-gcc" ]]; then
+ ln -sv "$(pkg_path_for musl)/bin/musl-gcc" "$(pkg_path_for musl)/bin/aarch64-linux-musl-gcc"
+ fi
+ fi
+
+ export rustc_target="${pkg_target%%-*}-unknown-linux-musl"
 build_line "Setting rustc_target=$rustc_target"
 # Used to find libgcc_s.so.1 when compiling `build.rs` in dependencies. Since
@@ -89,12 +97,12 @@ do_prepare() {
 do_build() {
 pushd "$PLAN_CONTEXT" || exit
- cargo build ${build_type#--debug} --target=$rustc_target --verbose
+ cargo build ${build_type#--debug} --target="$rustc_target" --verbose
 popd || exit
}
do_install() {
- install -v -D "$CARGO_TARGET_DIR"/$rustc_target/${build_type#--}/$bin \
+ install -v -D "$CARGO_TARGET_DIR"/"$rustc_target"/${build_type#--}/$bin \
 "$pkg_prefix"/bin/$bin
}
diff --git a/components/pkg-export-tar/habitat/plan.sh b/components/pkg-export-tar/habitat/plan.sh
index 41ab3b9da1..8bb4093e46 100644
--- a/components/pkg-export-tar/habitat/plan.sh
+++ b/components/pkg-export-tar/habitat/plan.sh
@@ -52,7 +52,15 @@ do_before() {
 do_prepare() {
 _common_prepare
- export rustc_target="x86_64-unknown-linux-musl"
+ # With the musl target, the ring crate is looking for aarch64-linux-musl-gcc,
+ # but the core/musl package provides musl-gcc. This workaround is necessary until the appropriate changes are made to core/musl for aarch64.
+ if [[ "${pkg_target%%-*}" == "aarch64" ]]; then
+ if [[ ! -r "$(pkg_path_for musl)/bin/aarch64-linux-musl-gcc" ]]; then
+ ln -sv "$(pkg_path_for musl)/bin/musl-gcc" "$(pkg_path_for musl)/bin/aarch64-linux-musl-gcc"
+ fi
+ fi
+
+ export rustc_target="${pkg_target%%-*}-unknown-linux-musl"
 build_line "Setting rustc_target=$rustc_target"
 # Used to find libgcc_s.so.1 when compiling `build.rs` in dependencies.
Since
@@ -74,11 +82,11 @@ do_prepare() {
 do_build() {
 pushd "$PLAN_CONTEXT" || exit
- cargo build ${build_type#--debug} --target=$rustc_target --verbose
+ cargo build ${build_type#--debug} --target="$rustc_target" --verbose
 popd || exit
}
do_install() {
- install -v -D "$CARGO_TARGET_DIR"/$rustc_target/${build_type#--}/$bin \
+ install -v -D "$CARGO_TARGET_DIR"/"$rustc_target"/${build_type#--}/$bin \
 "$pkg_prefix"/bin/$bin
}
diff --git a/components/pkg-export-tar/src/lib.rs b/components/pkg-export-tar/src/lib.rs
index a4be3c6364..a662d47ffc 100644
--- a/components/pkg-export-tar/src/lib.rs
+++ b/components/pkg-export-tar/src/lib.rs
@@ -40,7 +40,7 @@ async fn export_for_cli_matches(ui: &mut UI, cli: &cli::Cli) -> Result<()> {
 async fn export(ui: &mut UI, build_spec: BuildSpec<'_>) -> Result<()> {
 let hab_pkg = build_spec.hab;
- let build_result = build_spec.create(ui).await.unwrap();
+ let build_result = build_spec.create(ui).await?;
 let builder_dir_path = build_result.0.path();
 let pkg_ident = build_result.1;
diff --git a/components/pkg-mesosize/bin/hab-pkg-mesosize.sh b/components/pkg-mesosize/bin/hab-pkg-mesosize.sh
deleted file mode 100755
index cdae8e2cf7..0000000000
--- a/components/pkg-mesosize/bin/hab-pkg-mesosize.sh
+++ /dev/null
@@ -1,179 +0,0 @@
-#!/bin/bash
-#
-# # Usage
-#
-# ```
-# $ hab-pkg-mesosize [PKG ...]
-# ```
-#
-# # Synopsis
-#
-# Create a Mesos application from a set of Habitat packages.
-
-# defaults for the application
-: "${CPU:="0.5"}"
-: "${DISK:="0"}"
-: "${INSTANCES:="1"}"
-: "${MEM:="256"}"
-: "${PKG:="unknown"}"
-
-# Fail if there are any unset variables and whenever a command returns a
-# non-zero exit code.
-set -eu
-
-# If the variable `$DEBUG` is set, then print the shell commands as we execute.
-if [ -n "${DEBUG:-}" ]; then
- set -x
- export DEBUG
-fi
-
-# ## Help
-
-# **Internal** Prints help
-print_help() {
- echo -- "$program $version
-
-$author
-
-Habitat Package Mesosize - Create a Mesos application from a set of Habitat packages
-
-USAGE:
- $program [FLAGS] [OPTIONS] <PKG_IDENT>
-
-FLAGS:
- --help Prints help information
-
-OPTIONS:
- --cpu=N CPUs for the application (float, .5 is default)
- --disk=N Disk Space for the application (0 is default)
- --instances=N Number of application instances to launch (1 is default)
- --mem=N Memory for the application (MB, 256 is default)
-
-ARGS:
- <PKG_IDENT> Habitat package identifier (ex: acme/redis)
-"
-}
-
-# **Internal** Exit the program with an error message and a status code.
-# -# ```sh -# exit_with "Something bad went down" 55 -# ``` -exit_with() { - if [ "${HAB_NOCOLORING:-}" = "true" ]; then - printf -- "ERROR: %s\n" "$1" - else - case "${TERM:-}" in - *term | xterm-* | rxvt | screen | screen-*) - printf -- "\033[1;31mERROR: \033[1;37m%s\033[0m\n" "$1" - ;; - *) - printf -- "ERROR: %s\n" "$1" - ;; - esac - fi - exit "$2" -} - -find_system_commands() { - if mktemp --version 2>&1 | grep -q 'GNU coreutils'; then - _mktemp_cmd=$(command -v mktemp) - else - if /bin/mktemp --version 2>&1 | grep -q 'GNU coreutils'; then - _mktemp_cmd=/bin/mktemp - else - exit_with "We require GNU mktemp to build Mesos applications; aborting" 1 - fi - fi -} - -# parse the CLI flags and options -parse_options() { - for i in "$@" - do - case $i in - --help) - print_help - exit - ;; - --cpu=*) - CPU="${i#*=}" - shift - ;; - --disk=*) - DISK="${i#*=}" - shift - ;; - --instances=*) - INSTANCES="${i#*=}" - shift - ;; - --mem=*) - MEM="${i#*=}" - shift - ;; - *) - PKG=${i} - ;; - esac - done - if [ "$PKG" == "unknown" ]; then - print_help - exit_with "You must specify one or more Habitat packages to Mesosize." 1 - fi -} - -# Create a hab studio baseimage and populate it with the application -build_tarball_image() { - TARBALL_CONTEXT="$($_mktemp_cmd -t -d "${program}-XXXX")" - pushd "$TARBALL_CONTEXT" > /dev/null - env PKGS="$PKG" NO_MOUNT=1 hab studio -r "$TARBALL_CONTEXT" -t bare new - echo "$PKG" > "$TARBALL_CONTEXT"/.hab_pkg - popd > /dev/null - tar -czpf "$(package_name_with_version "$PKG")".tgz -C "$TARBALL_CONTEXT" ./ -} - -package_name_with_version() { - local ident_file - ident_file=$(find "$TARBALL_CONTEXT"/"$HAB_ROOT_PATH"/pkgs/"$PKG" -name IDENT) - awk 'BEGIN { FS = "/" }; { print $1 "-" $2 "-" $3 "-" $4 }' < "$ident_file" -} - -# https://mesosphere.github.io/marathon/docs/application-basics.html -create_application_definition() { - echo " - { - \"id\": \"$PKG\", - \"cmd\": \"/bin/id -u hab &>/dev/null || /sbin/useradd hab; /bin/chown -R hab:hab *; mount -t proc proc proc/; mount -t sysfs sys sys/;mount -o bind /dev dev/; /usr/sbin/chroot . ./init.sh start $PKG\", - \"cpus\": $CPU, - \"disk\": $DISK, - \"mem\": $MEM, - \"instances\": $INSTANCES, - \"uris\": [ \"URL_TO_$(package_name_with_version "$PKG").tgz\" ] - } -" - # what about exposing ports? - } - -# The root of the filesystem. If the program is running on a separate -# filesystem or chroot environment, this environment variable may need to be -# set. -: "${FS_ROOT:=}" -# The root path of the Habitat file system. If the `$HAB_ROOT_PATH` environment -# variable is set, this value is overridden, otherwise it is set to its default -: "${HAB_ROOT_PATH:=$FS_ROOT/hab}" - -# The current version of Habitat Studio -version='@version@' -# The author of this program -author='@author@' -# The short version of the program name which is used in logging output -program=$(basename "$0") - -find_system_commands - -parse_options "$@" -build_tarball_image -# publish the tarball somewhere? upload_tarball_to_artifact_store? 
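For illustration only, the Marathon application definition that `create_application_definition` above interpolates as a shell string could be built with `serde_json` roughly as follows; the function name is hypothetical and the `cmd` is abbreviated (the chroot/mount setup is elided):

```rust
use serde_json::json;

// Sketch of the Marathon app definition emitted above; field names
// follow the JSON produced by the shell function.
fn application_definition(pkg: &str,
                          cpu: f64,
                          disk: u64,
                          mem: u64,
                          instances: u64,
                          tarball_url: &str)
                          -> serde_json::Value {
    json!({
        "id": pkg,
        "cmd": format!("/usr/sbin/chroot . ./init.sh start {}", pkg),
        "cpus": cpu,
        "disk": disk,
        "mem": mem,
        "instances": instances,
        "uris": [tarball_url]
    })
}
```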
-create_application_definition
-rm -rf "$TARBALL_CONTEXT"
diff --git a/components/pkg-mesosize/habitat/plan.sh b/components/pkg-mesosize/habitat/plan.sh
deleted file mode 100644
index 2c3974d8cc..0000000000
--- a/components/pkg-mesosize/habitat/plan.sh
+++ /dev/null
@@ -1,42 +0,0 @@
-# shellcheck disable=2154
-pkg_name=hab-pkg-mesosize
-pkg_origin=core
-pkg_maintainer="The Habitat Maintainers <humans@habitat.sh>"
-pkg_license=('Apache-2.0')
-pkg_deps=(core/coreutils
- core/findutils
- core/gawk
- core/grep
- core/bash
- core/tar
- core/gzip
- core/hab)
-pkg_build_deps=()
-pkg_bin_dirs=(bin)
-
-program=$pkg_name
-
-pkg_version() {
- cat "$SRC_PATH/../../VERSION"
-}
-
-do_before() {
- do_default_before
- update_pkg_version
-}
-
-do_build() {
- cp -v "$SRC_PATH"/bin/${program}.sh ${program}
-
- # Use the bash from our dependency list as the shebang. Also, embed the
- # release version of the program.
- sed \
- -e "s,#!/bin/bash$,#!$(pkg_path_for bash)/bin/bash," \
- -e "s,@author@,$pkg_maintainer,g" \
- -e "s,@version@,$pkg_version/$pkg_release,g" \
- -i $program
-}
-
-do_install() {
- install -v -D $program "$pkg_prefix"/bin/$program
-}
diff --git a/components/plan-build-ps1/bin/hab-plan-build.ps1 b/components/plan-build-ps1/bin/hab-plan-build.ps1
index e1b2aad04d..4fd4f0928f 100644
--- a/components/plan-build-ps1/bin/hab-plan-build.ps1
+++ b/components/plan-build-ps1/bin/hab-plan-build.ps1
@@ -85,7 +85,7 @@ if (!(Test-Path Env:\HAB_BLDR_CHANNEL)) {
 if (!(Test-Path Env:\HAB_FALLBACK_CHANNEL)) {
 $env:HAB_FALLBACK_CHANNEL = "stable"
}
-# Use the refresh channel for dependencies in the core/chef/chef-platform origins
+# Use the refresh channel for dependencies in the core origin
 if (!(Test-Path Env:\HAB_REFRESH_CHANNEL)) {
 $env:HAB_REFRESH_CHANNEL = "stable"
}
@@ -335,25 +335,57 @@ function Set-HabBin {
 function Install-Dependency($dependency, $install_args = $null) {
 if (!$env:NO_INSTALL_DEPS) {
+ $oldEncoding = [Console]::OutputEncoding
+ [Console]::OutputEncoding = [System.Text.Encoding]::UTF8
 $origin = $dependency.Split("/")[0]
 $channel = $env:HAB_BLDR_CHANNEL
 $ignoreLocal = ""
- if ($origin -eq "core" -or $origin -eq "chef" -or $origin -eq "chef-platform") {
+ if ($origin -eq "core") {
 $channel="$env:HAB_REFRESH_CHANNEL"
 if (!$env:HAB_PREFER_LOCAL_CHEF_DEPS) { $ignoreLocal="--ignore-local" }
 }
 $cmd = "$HAB_BIN pkg install -u $env:HAB_BLDR_URL --channel $channel $dependency $install_args $ignoreLocal"
- Invoke-Expression $cmd
+ $res = Invoke-Expression $cmd | Out-String
 if ($LASTEXITCODE -ne 0 -and ($channel -ne $env:HAB_FALLBACK_CHANNEL)) {
 Write-BuildLine "Trying to install '$dependency' from '$env:HAB_FALLBACK_CHANNEL'"
 $cmd = "$HAB_BIN pkg install -u $env:HAB_BLDR_URL --channel $env:HAB_FALLBACK_CHANNEL $dependency $install_args $ignoreLocal"
- Invoke-Expression $cmd
+ $res = Invoke-Expression $cmd | Out-String
 }
+ Write-Host $res
+ [Console]::OutputEncoding = $oldEncoding
+ if($res.Split("`n")[-2] -match "\S+/\S+") {
+ $Matches[0]
+ } else {
+ ""
+ }
+ } else {
+ $(__resolve_full_ident $dependency)
 }
}
+
+# **Internal** From hab-auto-build, we set the specific packages to be installed using
+# the environment variable HAB_STUDIO_INSTALL_PKGS before building. This helper function resolves
+# the given dependency to the identifier installed from HAB_STUDIO_INSTALL_PKGS.
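A rough Rust equivalent of the PowerShell `__resolve_full_ident` helper that follows, assuming hart files named `origin-name-version-release-target.hart` listed in a semicolon-separated string (a sketch for illustration, not part of hab-auto-build):

```rust
// Given dep "acme/mypkg", find "acme-mypkg-<version>-<release>-<target>.hart"
// among the installed package paths and rebuild the fully qualified ident
// "acme/mypkg/<version>/<release>". Returns None when nothing matches.
fn resolve_full_ident(dep: &str, install_pkgs: &str, target: &str) -> Option<String> {
    let prefix = format!("{}-", dep.replace('/', "-"));
    let suffix = format!("-{}.hart", target);
    install_pkgs.split(';').find_map(|path| {
        // Keep only the file name, tolerating both path separators.
        let file = path.rsplit(|c| c == '/' || c == '\\').next()?;
        let rest = file.strip_prefix(prefix.as_str())?
                       .strip_suffix(suffix.as_str())?;
        // `rest` is "<version>-<release>"; the release is the final component.
        let (version, release) = rest.rsplit_once('-')?;
        Some(format!("{}/{}/{}", dep, version, release))
    })
}
```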
+function __resolve_full_ident($dep) { + if (-not [string]::IsNullOrEmpty($env:HAB_STUDIO_INSTALL_PKGS)) { + $transformedDep = $dep -replace '/', '-' + $paths = $env:HAB_STUDIO_INSTALL_PKGS -split ";" + foreach ($path in $paths) { + if ($path -match "$transformedDep-(.*)-(.*)-$pkg_target.hart") { + $version = $matches[1] + $timestamp = $matches[2] + + $ident = "$dep/$version/$timestamp" + return $ident + } + } + } + + return "" +} + # **Internal** Returns (on stdout) the `DEPS` file contents of another locally # installed package which contain the set of all direct run dependencies. An # empty set could be returned as whitespace and/or newlines. The lack of a @@ -629,14 +661,7 @@ function Resolve-ScaffoldingDependencyList { if($pkg_scaffolding) { $pkg_scaffolding = @($pkg_scaffolding)[0] - $oldEncoding = [Console]::OutputEncoding - [Console]::OutputEncoding = [System.Text.Encoding]::UTF8 - $res = Install-Dependency $pkg_scaffolding | Out-String - Write-Host $res - [Console]::OutputEncoding = $oldEncoding - if($res.Split("`n")[-2] -match "\S+/\S+") { - $resolved = $Matches[0] - } + $resolved = Install-Dependency $pkg_scaffolding # Add scaffolding package to the list of scaffolding build deps $scaff_build_deps += $pkg_scaffolding if($resolved) { @@ -720,14 +745,7 @@ function Resolve-BuildDependencyList { # Build `${pkg_build_deps_resolved[@]}` containing all resolved direct build # dependencies. foreach($dep in $pkg_build_deps) { - $oldEncoding = [Console]::OutputEncoding - [Console]::OutputEncoding = [System.Text.Encoding]::UTF8 - $res = Install-Dependency $dep | Out-String - Write-Host $res - [Console]::OutputEncoding = $oldEncoding - if($res.Split("`n")[-2] -match "\S+/\S+") { - $resolved = $Matches[0] - } + $resolved = Install-Dependency $dep if($resolved) { Write-BuildLine "Resolved build dependency '$dep' to $resolved" $script:pkg_build_deps_resolved+=(Resolve-Path "$HAB_PKG_PATH/$resolved").Path @@ -743,14 +761,7 @@ function Resolve-RunDependencyList { # Build `${pkg_deps_resolved[@]}` containing all resolved direct run # dependencies. foreach($dep in $pkg_deps) { - $oldEncoding = [Console]::OutputEncoding - [Console]::OutputEncoding = [System.Text.Encoding]::UTF8 - $res = Install-Dependency $dep --ignore-install-hook | Out-String - Write-Host $res - [Console]::OutputEncoding = $oldEncoding - if($res.Split("`n")[-2] -match "\S+/\S+") { - $resolved = $Matches[0] - } + $resolved = Install-Dependency $dep --ignore-install-hook if ($resolved) { Write-BuildLine "Resolved dependency '$dep' to $resolved" $script:pkg_deps_resolved+=(Resolve-Path "$HAB_PKG_PATH/$resolved").Path diff --git a/components/plan-build/bin/hab-plan-build-linux.sh b/components/plan-build/bin/hab-plan-build-linux.sh index a54b7a306e..97e45be2bf 100755 --- a/components/plan-build/bin/hab-plan-build-linux.sh +++ b/components/plan-build/bin/hab-plan-build-linux.sh @@ -365,10 +365,10 @@ export HAB_BLDR_CHANNEL # Also note that this only really comes into play if HAB_BLDR_CHANNEL # has been set to something different. 
: "${HAB_FALLBACK_CHANNEL=stable}" -# Use the refresh channel for dependencies in the core/chef/chef-platform origins +# Use the refresh channel for dependencies in the core origin : "${HAB_REFRESH_CHANNEL:=stable}" -# If we prefer to use local core/chef/chef-platform deps then a locally installed -# package in one of these origins will be used in preference to what is in the refresh +# If we prefer to use local core deps then a locally installed +# package in this origin will be used in preference to what is in the refresh : "${HAB_PREFER_LOCAL_CHEF_DEPS:=false}" # The value of `$PATH` on initial start of this program INITIAL_PATH="$PATH" @@ -692,7 +692,7 @@ _install_dependency() { local channel="$HAB_BLDR_CHANNEL" if [[ -z "${NO_INSTALL_DEPS:-}" ]]; then origin="$(echo "$dep" | cut -d "/" -f 1)" - if [[ $origin == "core" || $origin == "chef" || $origin == "chef-platform" ]]; then + if [[ $origin == "core" ]]; then channel="$HAB_REFRESH_CHANNEL" if [[ $HAB_PREFER_LOCAL_CHEF_DEPS == "false" ]]; then IGNORE_LOCAL="--ignore-local" diff --git a/components/studio/bin/hab-studio-linux.sh b/components/studio/bin/hab-studio-linux.sh index 575efbb2f6..09296ef625 100755 --- a/components/studio/bin/hab-studio-linux.sh +++ b/components/studio/bin/hab-studio-linux.sh @@ -1040,6 +1040,18 @@ chown_certs() { fi } +# **Internal** Mimic delay using busy loop +# We cannot use the sleep command as we have already unmounted, but we are +# encountering 'device busy' failures on AArch64 Linux. We need this because +# we unmounted the resource and want to allow some time for it to be freed. +busy_sleep() { + duration="$1" + end_time=$(( $(date +%s) + duration )) + while [ "$(date +%s)" -lt "$end_time" ]; do + : # No-op, keeps the loop running + done +} + # **Internal** Unmount mount point if mounted and abort if an unmount is # unsuccessful. # @@ -1055,15 +1067,30 @@ umount_fs() { # Filesystem is confirmed umounted, return success return 0 else + # TODO: The retry mechanism with an increasing delay has been added + # to address potential race conditions: if the `umount` operation + # is performed asynchronously, the filesystem might still be reported + # as mounted during the retries while the unmounting is in progress. + # By incrementally increasing the delay between retries (starting with + # a 5-second delay and increasing with each attempt), we aim to account + # for such races and give the system more time to process the unmount + # operation. This approach provides a balance between responsiveness + # and allowing sufficient time for the unmount process to complete. + # If this still impacts user experience, further adjustments such as + # dynamic retry intervals or enhanced detection mechanisms could be + # explored. + RETRY_DELAY=5 + MAX_RETRIES=5 + i=1 + while [ "$i" -le "$MAX_RETRIES" ] + do + busy_sleep $((RETRY_DELAY * i)) # Delay increases with each retry + if ! is_fs_mounted "$_mount_point"; then + return 0 + fi + i=$((i+1)) + done # Despite a successful umount, filesystem is still mounted - # - # TODO fn: there may a race condition here: if the `umount` is - # performed asynchronously then it might still be reported as mounted - # when the umounting is still queued up. We're erring on the side of - # catching any possible races here to determine if there's a problem or - # not. If this unduly impacts user experience then an alternate - # approach is to wait/poll until the filesystem is unmounted (with a - # deadline to abort). 
>&2 echo "After unmounting filesystem '$_mount_point', the mount \ persisted. Check that the filesystem is no longer in the mounted using \ \`mount(8)'and retry the last command." diff --git a/components/studio/bin/hab-studio.ps1 b/components/studio/bin/hab-studio.ps1 index 5aab8a2ea9..b6092c8b24 100644 --- a/components/studio/bin/hab-studio.ps1 +++ b/components/studio/bin/hab-studio.ps1 @@ -521,7 +521,8 @@ function Remove-Studio { } else { if(Test-Path $HAB_STUDIO_ROOT) { Write-HabInfo "Destroying Studio at $HAB_STUDIO_ROOT" - Remove-Item $HAB_STUDIO_ROOT -Recurse -Force + Get-ChildItem $HAB_STUDIO_ROOT -Recurse | Remove-Item -Force -Recurse + Remove-Item $HAB_STUDIO_ROOT } } } diff --git a/components/sup/habitat/plan.sh b/components/sup/habitat/plan.sh index 238f4c900f..fb59115ce9 100644 --- a/components/sup/habitat/plan.sh +++ b/components/sup/habitat/plan.sh @@ -57,7 +57,7 @@ do_before() { do_prepare() { _common_prepare - export rustc_target="x86_64-unknown-linux-gnu" + export rustc_target="${pkg_target%%-*}-unknown-linux-gnu" build_line "Setting rustc_target=$rustc_target" export LIBZMQ_PREFIX=$(pkg_path_for zeromq) @@ -91,13 +91,13 @@ do_build() { export LIBRARY_PATH=$LIBZMQ_PREFIX/lib pushd "$SRC_PATH" > /dev/null || exit - cargo build ${build_type#--debug} --target=$rustc_target --verbose --no-default-features \ + cargo build ${build_type#--debug} --target="$rustc_target" --verbose --no-default-features \ --features apidocs popd > /dev/null || exit } do_install() { - install -v -D "$CARGO_TARGET_DIR"/$rustc_target/${build_type#--}/$bin \ + install -v -D "$CARGO_TARGET_DIR"/"$rustc_target"/${build_type#--}/$bin \ "$pkg_prefix"/bin/$bin } diff --git a/components/sup/src/manager/service/hooks.rs b/components/sup/src/manager/service/hooks.rs index 385d374fa4..1433c5c419 100644 --- a/components/sup/src/manager/service/hooks.rs +++ b/components/sup/src/manager/service/hooks.rs @@ -732,8 +732,9 @@ mod tests { types::{GossipListenAddr, HttpListenAddr, ListenCtlAddr}}; - #[cfg(not(any(all(target_os = "linux", any(target_arch = "x86_64")), - all(target_os = "windows", target_arch = "x86_64"))))] + #[cfg(not(any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), + all(target_os = "windows", target_arch = "x86_64"),)))] use habitat_core::package::metadata::MetaFile; use habitat_core::{crypto::keys::KeyCache, fs::CACHE_KEY_PATH, @@ -797,8 +798,8 @@ mod tests { PathBuf::from("/tmp"), PathBuf::from("/tmp")); // Platforms without standard package support require all packages to be native packages - // TODO: This is currently also needed on aarch64-linux until we publish official packages - #[cfg(not(any(all(target_os = "linux", any(target_arch = "x86_64")), + #[cfg(not(any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), all(target_os = "windows", target_arch = "x86_64"))))] { tokio::fs::create_dir_all(pkg_install.installed_path()).await diff --git a/components/sup/static/plan.sh b/components/sup/static/plan.sh index 5248d5642a..59dedae4ba 100644 --- a/components/sup/static/plan.sh +++ b/components/sup/static/plan.sh @@ -13,11 +13,11 @@ do_begin() { PLAN_CONTEXT=$(abspath ..) } -# shellcheck disable=2155 +# shellcheck disable=2155,SC2154 do_prepare() { _common_prepare - export rustc_target="x86_64-unknown-linux-musl" + export rustc_target="${pkg_target%%-*}-unknown-linux-musl" build_line "Setting rustc_target=$rustc_target" # Used to find libgcc_s.so.1 when compiling `build.rs` in dependencies. 
Since diff --git a/components/sup/tests/utils/fs.rs b/components/sup/tests/utils/fs.rs index e81c9b8ef7..6d15ad639b 100644 --- a/components/sup/tests/utils/fs.rs +++ b/components/sup/tests/utils/fs.rs @@ -365,11 +365,8 @@ async fn write_default_metafiles(hab_root: &HabRoot, // Write metafiles to convert the package to a native package on platforms without package // support - // TODO: We intentionally use native packages on aarch64-linux platform as the interpreter - // packages core/busybox-static are not yet available publicly on builder. This will cause - // The package to fail when the supervisor attempts to load it. We should use standard - // packages on aarch64-linux once the interpreter package is made publicly available. - #[cfg(not(any(all(target_os = "linux", any(target_arch = "x86_64")), + #[cfg(not(any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), all(target_os = "windows", target_arch = "x86_64"))))] { let pkg_type_metafile = hab_root.pkg_type_path(pkg_origin, pkg_name); diff --git a/support/validation/x86_64-linux-kernel2/Vagrantfile b/support/validation/x86_64-linux-kernel2/Vagrantfile deleted file mode 100644 index 9a0d510ed3..0000000000 --- a/support/validation/x86_64-linux-kernel2/Vagrantfile +++ /dev/null @@ -1,35 +0,0 @@ -# -*- mode: ruby -*- -# vi: set ft=ruby : -Vagrant.configure("2") do |config| - config.vm.box = "bento/centos-6.10" - config.vm.box_check_update = false - - config.vm.provider "virtualbox" do |vb| - vb.memory = "2048" - end - - config.vm.provision "shell", inline: <<-SHELL - yum install -y wget curl git - - # You'll need this for running most services - adduser hab - - # You'll want this if you want to build a plan - git clone https://github.com/habitat-sh/core-plans.git - - # You'll also want the keys for building for whatever origin you - # choose. - # - # It expects you to have HAB_ORIGIN and HAB_AUTH_TOKEN set in your - # environment when you provision the VM. - # - # Additionally, it will install from the *staging* channel unless - # you specifically override this with the INSTALL_CHANNEL environment - # variable. This is the default because this VM is mostly used for - # manual evaluation of release candidates from that channel. 
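Back in the Supervisor plans above, the hard-coded x86_64 Rust triple is now derived from Habitat's `pkg_target`: `${pkg_target%%-*}` strips the longest `-*` suffix, keeping everything before the first hyphen. A quick illustration of the mapping:

```sh
# How the do_prepare() expansion maps Habitat targets to Rust triples.
for pkg_target in x86_64-linux aarch64-linux x86_64-linux-kernel2; do
  echo "$pkg_target -> ${pkg_target%%-*}-unknown-linux-gnu"
done
# x86_64-linux -> x86_64-unknown-linux-gnu
# aarch64-linux -> aarch64-unknown-linux-gnu
# x86_64-linux-kernel2 -> x86_64-unknown-linux-gnu
```

The static Supervisor plan applies the same expansion to build a `-unknown-linux-musl` triple instead.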
- curl https://raw.githubusercontent.com/habitat-sh/habitat/main/components/hab/install.sh -o install.sh - bash install.sh -t x86_64-linux-kernel2 -c #{ENV.fetch('INSTALL_CHANNEL', 'staging')} - sudo hab license accept - sudo -u vagrant hab origin key download #{ENV['HAB_ORIGIN']} --secret --auth=#{ENV['HAB_AUTH_TOKEN']} - SHELL -end diff --git a/test-services/test-probe/habitat/aarch64-linux/config/config.toml b/test-services/test-probe/habitat/aarch64-linux/config/config.toml new file mode 100755 index 0000000000..4dd3616399 --- /dev/null +++ b/test-services/test-probe/habitat/aarch64-linux/config/config.toml @@ -0,0 +1,3 @@ +host = "{{cfg.host}}" +port = {{cfg.port}} +render_context_file = "{{cfg.render_context_file}}" diff --git a/test-services/test-probe/habitat/aarch64-linux/config/render_context.json b/test-services/test-probe/habitat/aarch64-linux/config/render_context.json new file mode 100755 index 0000000000..92cf97f92f --- /dev/null +++ b/test-services/test-probe/habitat/aarch64-linux/config/render_context.json @@ -0,0 +1,7 @@ +{ + "sys": {{toJson sys}}, + "pkg": {{toJson pkg}}, + "cfg": {{toJson cfg}}, + "svc": {{toJson svc}}, + "bind": {{toJson bind}} +} diff --git a/test-services/test-probe/habitat/aarch64-linux/default.toml b/test-services/test-probe/habitat/aarch64-linux/default.toml new file mode 100755 index 0000000000..39442ac6a2 --- /dev/null +++ b/test-services/test-probe/habitat/aarch64-linux/default.toml @@ -0,0 +1,3 @@ +host = "0.0.0.0" +port = 8000 +render_context_file = "/hab/svc/test-probe/config/render_context.json" diff --git a/test-services/test-probe/habitat/aarch64-linux/health_exit b/test-services/test-probe/habitat/aarch64-linux/health_exit new file mode 100644 index 0000000000..c227083464 --- /dev/null +++ b/test-services/test-probe/habitat/aarch64-linux/health_exit @@ -0,0 +1 @@ +0 \ No newline at end of file diff --git a/test-services/test-probe/habitat/aarch64-linux/hooks/health-check b/test-services/test-probe/habitat/aarch64-linux/hooks/health-check new file mode 100644 index 0000000000..b1a02df9e9 --- /dev/null +++ b/test-services/test-probe/habitat/aarch64-linux/hooks/health-check @@ -0,0 +1,6 @@ +#!{{pkgPathFor "core/bash"}}/bin/bash + +echo "Running health_check hook: {{pkg.ident}} (PID: $$, PPID: $PPID, PGID: $(ps h -o pgid -p $$))" +sleep 2 +echo "health_check finished!" +exit $(cat {{pkg.path}}/health_exit) diff --git a/test-services/test-probe/habitat/aarch64-linux/hooks/init b/test-services/test-probe/habitat/aarch64-linux/hooks/init new file mode 100644 index 0000000000..3122e1580f --- /dev/null +++ b/test-services/test-probe/habitat/aarch64-linux/hooks/init @@ -0,0 +1,15 @@ +#!{{pkgPathFor "core/bash"}}/bin/bash + +echo "Initializing package {{pkg.ident}} (PID: $$, PPID: $PPID, PGID: $(ps h -o pgid -p $$))" +sleep 1 +echo "... reticulating splines ..." +echo "... integrating curves ..." +echo "... relaxing splines ..." +echo "... calculating inverse probability matrices ..." +sleep 1 +echo "Deliberately taking a long time in the init hook" +for i in {1..10}; do + sleep 1 + echo "Sleeping ($i)/10..." 
+done +echo "init hook DONE" diff --git a/test-services/test-probe/habitat/aarch64-linux/hooks/post-run b/test-services/test-probe/habitat/aarch64-linux/hooks/post-run new file mode 100644 index 0000000000..4f3f72ca5c --- /dev/null +++ b/test-services/test-probe/habitat/aarch64-linux/hooks/post-run @@ -0,0 +1,4 @@ +#!{{pkgPathFor "core/bash"}}/bin/bash + +echo "Running post-run script: {{pkg.ident}} (PID: $$, PPID: $PPID, PGID: $(ps h -o pgid -p $$))" +echo "Done" diff --git a/test-services/test-probe/habitat/aarch64-linux/hooks/post-stop b/test-services/test-probe/habitat/aarch64-linux/hooks/post-stop new file mode 100644 index 0000000000..84411ed399 --- /dev/null +++ b/test-services/test-probe/habitat/aarch64-linux/hooks/post-stop @@ -0,0 +1,14 @@ +#!{{pkgPathFor "core/bash"}}/bin/bash + +byebye(){ + echo "Got a signal!" +} +trap byebye INT TERM + + +echo "Deliberately long post-stop hook executing: {{pkg.ident}} (PID: $$, PPID: $PPID, PGID: $(ps h -o pgid -p $$))" +for i in {1..15}; do + sleep 1 + echo "Sleeping ($i)/15..." +done +echo "post-stop hook DONE" diff --git a/test-services/test-probe/habitat/aarch64-linux/hooks/run b/test-services/test-probe/habitat/aarch64-linux/hooks/run new file mode 100644 index 0000000000..1005340ca9 --- /dev/null +++ b/test-services/test-probe/habitat/aarch64-linux/hooks/run @@ -0,0 +1,20 @@ +#!{{pkgPathFor "core/bash"}}/bin/bash + + +exec 2>&1 + +echo "Running {{pkg.ident}}" # (PID: $$, PPID: $PPID, PGID: $(ps h -o pgid -p $$))" +{{ #if bind.thing_with_a_port }} +echo "*************************************************************" +echo "Running with a bound service group for 'thing_with_a_port'" +{{ #each bind.thing_with_a_port.members as |m| ~}} +echo "- {{m.sys.hostname}}" +{{/each ~}} +echo "*************************************************************" +{{ else }} +echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" +echo "Running WITHOUT a bound service group for 'thing_with_a_port'" +echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" +{{ /if }} + +exec test-probe -c "{{pkg.svc_config_path}}/config.toml" diff --git a/test-services/test-probe/habitat/aarch64-linux/plan.sh b/test-services/test-probe/habitat/aarch64-linux/plan.sh new file mode 100644 index 0000000000..cfb9f84a0a --- /dev/null +++ b/test-services/test-probe/habitat/aarch64-linux/plan.sh @@ -0,0 +1,47 @@ +# shellcheck disable=2034,2154 +pkg_name=test-probe +pkg_origin=habitat-testing +pkg_version="0.1.0" +pkg_maintainer="The Habitat Maintainers " +pkg_license=("Apache-2.0") +pkg_bin_dirs=(bin) +pkg_deps=(core/glibc + core/gcc-libs + core/procps-ng + core/bash) +pkg_build_deps=(core/coreutils + core/rust + core/gcc + core/git + core/make) +pkg_binds_optional=( + [thing_with_a_port]="port" +) + +bin="test-probe" + +do_prepare() { + export rustc_target="aarch64-unknown-linux-gnu" + build_line "Setting rustc_target=$rustc_target" + + # Used by Cargo to use a pristine, isolated directory for all compilation + export CARGO_TARGET_DIR="$HAB_CACHE_SRC_PATH/$pkg_dirname" + build_line "Setting CARGO_TARGET_DIR=$CARGO_TARGET_DIR" +} + +do_build() { + ( + cd "$PLAN_CONTEXT"/.. || exit + cargo build --target="$rustc_target" --verbose + ) +} + +do_install() { + install -v -D "$CARGO_TARGET_DIR/$rustc_target/debug/$bin" \ + "$pkg_prefix/bin/$bin" + install -v -D "$PLAN_CONTEXT/health_exit" "$pkg_prefix" +} + +do_strip() { + return 0 +}
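The `health_exit` file installed into `$pkg_prefix` is what the health-check hook `cat`s for its exit status, so a running test-probe's reported health can be flipped without rebuilding the package. A hypothetical session (test fixture only; installed packages are normally treated as immutable, and Habitat interprets health-check exit 0 as OK and 1 as a warning):

```sh
# Hypothetical usage sketch: toggle test-probe's health-check result in place.
pkg_path="$(hab pkg path habitat-testing/test-probe)"
echo 1 > "$pkg_path/health_exit"   # next health-check run exits 1 (warning)
echo 0 > "$pkg_path/health_exit"   # back to healthy
```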