From 4dfad9c64db7ea1aa0359862a9ae9a8c05e34199 Mon Sep 17 00:00:00 2001 From: pengshanyu Date: Tue, 9 Jul 2024 22:25:43 +0800 Subject: [PATCH 1/3] add tier-0 test case Signed-off-by: pengshanyu delete blank Lines Signed-off-by: pengshanyu trim trailing whitespace Signed-off-by: pengshanyu update function name and implementation Signed-off-by: pengshanyu added:check /var on ostree Signed-off-by: pengshanyu rename old tier-0 to multi-bluechi-agents;rename sanity test to tier-0 Signed-off-by: pengshanyu trailing-whitespace Signed-off-by: pengshanyu remove old tier 0 Signed-off-by: pengshanyu split sanity test to 6 tests Signed-off-by: pengshanyu trailing-whitespace Signed-off-by: pengshanyu not number the tests;add main.fmf;manual_trigger multi-bluechi-agents Signed-off-by: pengshanyu correct typo Signed-off-by: pengshanyu add polarion id Signed-off-by: pengshanyu --- .packit.yaml | 21 +++++++++- plans/e2e/ffi.fmf | 11 ----- plans/e2e/multi-bluechi-agents.fmf | 34 ++++++++++++++++ plans/e2e/tier-0.fmf | 38 +++++------------- plans/e2e/tier-1.fmf | 2 +- plans/main.fmf | 19 +++++++++ tests/e2e/set-ffi-env-e2e | 11 +++++ tests/qm-connectivity/main.fmf | 4 +- tests/qm-sanity-test/README.md | 17 ++++++++ .../check_bluechi_controller_is_ok.fmf | 6 +++ .../check_bluechi_controller_is_ok.sh | 33 +++++++++++++++ tests/qm-sanity-test/check_bluechi_is_ok.fmf | 6 +++ tests/qm-sanity-test/check_bluechi_is_ok.sh | 27 +++++++++++++ tests/qm-sanity-test/check_qm_is_up.fmf | 6 +++ tests/qm-sanity-test/check_qm_is_up.sh | 18 +++++++++ .../qm-sanity-test/check_qm_podman_is_ok.fmf | 6 +++ tests/qm-sanity-test/check_qm_podman_is_ok.sh | 18 +++++++++ .../check_qm_podman_quadlet_is_ok.fmf | 6 +++ .../check_qm_podman_quadlet_is_ok.sh | 40 +++++++++++++++++++ .../check_var_partition_exist.fmf | 6 +++ .../check_var_partition_exist.sh | 26 ++++++++++++ tests/qm-sanity-test/main.fmf | 1 + tests/qm-unit-files/main.fmf | 4 +- 23 files changed, 313 insertions(+), 47 deletions(-) create mode 100644 plans/e2e/multi-bluechi-agents.fmf create mode 100644 plans/main.fmf create mode 100644 tests/qm-sanity-test/README.md create mode 100644 tests/qm-sanity-test/check_bluechi_controller_is_ok.fmf create mode 100644 tests/qm-sanity-test/check_bluechi_controller_is_ok.sh create mode 100644 tests/qm-sanity-test/check_bluechi_is_ok.fmf create mode 100644 tests/qm-sanity-test/check_bluechi_is_ok.sh create mode 100644 tests/qm-sanity-test/check_qm_is_up.fmf create mode 100644 tests/qm-sanity-test/check_qm_is_up.sh create mode 100644 tests/qm-sanity-test/check_qm_podman_is_ok.fmf create mode 100644 tests/qm-sanity-test/check_qm_podman_is_ok.sh create mode 100644 tests/qm-sanity-test/check_qm_podman_quadlet_is_ok.fmf create mode 100644 tests/qm-sanity-test/check_qm_podman_quadlet_is_ok.sh create mode 100644 tests/qm-sanity-test/check_var_partition_exist.fmf create mode 100644 tests/qm-sanity-test/check_var_partition_exist.sh create mode 100644 tests/qm-sanity-test/main.fmf diff --git a/.packit.yaml b/.packit.yaml index 156d5173..51b9af1d 100644 --- a/.packit.yaml +++ b/.packit.yaml @@ -58,10 +58,12 @@ jobs: - job: tests trigger: pull_request - identifier: e2e-tiers - tmt_plan: /plans/e2e/tier-0 + identifier: e2e-multi-bluechi-agents + tmt_plan: /plans/e2e/multi-bluechi-agents targets: - centos-stream-9-x86_64 + skip_build: true + manual_trigger: true tf_extra_params: environments: - artifacts: @@ -91,6 +93,21 @@ jobs: - size: ">= 20 GB" - size: ">= 20 GB" + - job: tests + trigger: pull_request + identifier: qm-tier-0 + tmt_plan: 
/plans/e2e/tier-0 + targets: + - centos-stream-9-x86_64 + tf_extra_params: + environments: + - artifacts: + - type: repository-file + id: https://copr.fedorainfracloud.org/coprs/g/centos-automotive-sig/bluechi-snapshot/repo/centos-stream-9 + hardware: + disk: + - size: ">= 20 GB" + - job: propose_downstream trigger: release update_release: false diff --git a/plans/e2e/ffi.fmf b/plans/e2e/ffi.fmf index fe63df5b..6a703bd0 100644 --- a/plans/e2e/ffi.fmf +++ b/plans/e2e/ffi.fmf @@ -2,7 +2,6 @@ summary: FFI - QM FreedomFromInterference environment: CONTROL_CONTAINER_NAME: host - FFI_SETUP_OPTIONS: none discover: how: fmf @@ -11,17 +10,7 @@ discover: provision: how: local -prepare: - - name: Install rpms - how: install - package: podman - adjust: - - when: run == manual - environment+: - # Sample code to use manual packit repo - PACKIT_COPR_PROJECT: packit/containers-qm-291 - - when: distro == centos-stream-9, fedora environment+: FFI_SETUP_OPTIONS: --set-qm-disk-part=yes diff --git a/plans/e2e/multi-bluechi-agents.fmf b/plans/e2e/multi-bluechi-agents.fmf new file mode 100644 index 00000000..d62615a1 --- /dev/null +++ b/plans/e2e/multi-bluechi-agents.fmf @@ -0,0 +1,34 @@ +summary: multiple bluechi-agents test - QM Interconnect through bluechi + +discover: + how: fmf + filter: tag:multi-bluechi-agents + +provision: + how: local + +adjust: + - when: distro == centos-stream-9 or distro == fedora + prepare+: + - name: Prepare Repos + how: shell + script: | + dnf install -y dnf-plugin-config-manager epel-release + dnf config-manager -y --set-enabled crb + + - name: install repos + how: install + package: + - podman + + - name: Set QM env + how: shell + script: | + cd tests/e2e + ./run-test-e2e --skip-tests=yes + +execute: + how: tmt + +report: + how: junit diff --git a/plans/e2e/tier-0.fmf b/plans/e2e/tier-0.fmf index 64bf801f..eb5b6184 100644 --- a/plans/e2e/tier-0.fmf +++ b/plans/e2e/tier-0.fmf @@ -1,39 +1,19 @@ -summary: Tier 0 - QM Interconnect through bluechi +summary: Tier 0 - QM sanity test discover: how: fmf filter: tier:0 -provision: - how: local - -adjust: - - when: run == manual - environment+: - # Sample code to use manual packit repo - PACKIT_COPR_PROJECT: packit/containers-qm-291 - - - when: distro == centos-stream-9 or distro == fedora - prepare+: - - name: Prepare Repos - how: shell - script: | - dnf install -y dnf-plugin-config-manager epel-release - dnf config-manager -y --set-enabled crb - - - name: install repos - how: install - package: - - podman - - - name: Set QM env - how: shell - script: | - cd tests/e2e - ./run-test-e2e --skip-tests=yes +prepare+: + - name: Set QM environment + how: shell + order: 50 + script: | + cd tests/e2e + ./set-ffi-env-e2e "${FFI_SETUP_OPTIONS}" execute: how: tmt report: - how: junit + how: junit \ No newline at end of file diff --git a/plans/e2e/tier-1.fmf b/plans/e2e/tier-1.fmf index 0ca9e1cd..7c2c7f0f 100644 --- a/plans/e2e/tier-1.fmf +++ b/plans/e2e/tier-1.fmf @@ -1,4 +1,4 @@ -summary: Tier 0 - QM Interconnect through bluechi +summary: Tier 1 - QM Interconnect through bluechi discover: how: fmf diff --git a/plans/main.fmf b/plans/main.fmf new file mode 100644 index 00000000..7a249bf3 --- /dev/null +++ b/plans/main.fmf @@ -0,0 +1,19 @@ +summary: general data used by the test plans + +environment: + FFI_SETUP_OPTIONS: none + PACKAGE: qm + PACKIT_COPR_PROJECT: release + +prepare: + - name: Install podman + how: install + order: 20 + package: + - podman + +adjust: + - when: run == manual + environment+: + # Sample code to use manual packit repo + 
PACKIT_COPR_PROJECT: packit/containers-qm-291
diff --git a/tests/e2e/set-ffi-env-e2e b/tests/e2e/set-ffi-env-e2e
index 0f3cdcee..6e1902c3 100755
--- a/tests/e2e/set-ffi-env-e2e
+++ b/tests/e2e/set-ffi-env-e2e
@@ -228,6 +228,17 @@ cat > /etc/bluechi/agent.conf.d/00-default.conf << 'EOF'
 [bluechi-agent]
 NodeName=localrootfs
 EOF
+
+controller_host_ip=$(hostname -I | awk '{print $1}')
+qm_bluechi_agent_config_file="/etc/qm/bluechi/agent.conf.d/agent.conf"
+if [[ -f "${qm_bluechi_agent_config_file}" ]]; then
+    if ! grep "ControllerHost=${controller_host_ip}" "${qm_bluechi_agent_config_file}" >/dev/null; then
+        sed -i '$a \ControllerHost='"${controller_host_ip}"'' ${qm_bluechi_agent_config_file}
+    fi
+else
+    echo "Configuration file not found: ${qm_bluechi_agent_config_file}"
+fi
+
 # Enable services
 info_message "Setup qm services, enable bluechi services"
 info_message "=============================="
diff --git a/tests/qm-connectivity/main.fmf b/tests/qm-connectivity/main.fmf
index 443f2c1f..00f294ef 100644
--- a/tests/qm-connectivity/main.fmf
+++ b/tests/qm-connectivity/main.fmf
@@ -1,7 +1,7 @@
-/tier0:
+/multi-bluechi-agents:
     summary: Test is calling e2e/lib/tests as stand alone test
     test: ./test.sh
-    tier: 0
+    tag: multi-bluechi-agents
     id: 7356c7cc-95aa-4a8e-9b02-2726f570add6
 /tier1:
     summary: Test is calling e2e/lib/tests AutoSD container
diff --git a/tests/qm-sanity-test/README.md b/tests/qm-sanity-test/README.md
new file mode 100644
index 00000000..9c329e8e
--- /dev/null
+++ b/tests/qm-sanity-test/README.md
@@ -0,0 +1,17 @@
+# QM sanity test
+
+This test suite runs basic sanity tests for qm to confirm that qm has been installed properly and that the qm environment has started successfully.
+
+## This test suite includes these tests
+
+1. Confirm that bluechi-controller is active and bluechi-agent is online.
+
+2. Confirm that the host and QM bluechi-agents are connected to bluechi-controller.
+
+3. Confirm that qm is up and running.
+
+4. Confirm that podman inside qm works correctly.
+
+5. Confirm that podman can run and exec a container inside qm via a systemd service file.
+
+6. Confirm that the /var partition exists.
diff --git a/tests/qm-sanity-test/check_bluechi_controller_is_ok.fmf b/tests/qm-sanity-test/check_bluechi_controller_is_ok.fmf
new file mode 100644
index 00000000..2b300c17
--- /dev/null
+++ b/tests/qm-sanity-test/check_bluechi_controller_is_ok.fmf
@@ -0,0 +1,6 @@
+summary: Test bluechi-controller is active and bluechi-agent is online
+test: /bin/bash ./check_bluechi_controller_is_ok.sh
+duration: 10m
+tier: 0
+framework: shell
+id: 44d1f92b-2885-49d3-900a-23cde296c9d8
diff --git a/tests/qm-sanity-test/check_bluechi_controller_is_ok.sh b/tests/qm-sanity-test/check_bluechi_controller_is_ok.sh
new file mode 100644
index 00000000..a844778b
--- /dev/null
+++ b/tests/qm-sanity-test/check_bluechi_controller_is_ok.sh
@@ -0,0 +1,33 @@
+#!/bin/bash -euvx
+
+# shellcheck disable=SC1091
+source ../e2e/lib/utils
+
+
+# Verify bluechi-controller is up and bluechictl is ok
+check_bluechi_controller_is_ok(){
+    if [ "$(systemctl is-active bluechi-controller)" == "active" ]; then
+        info_message "check_bluechi_controller_is_ok(): bluechi-controller is active."
+        info_message "PASS: check_bluechi_controller_is_ok()"
+    else
+        info_message "FAIL: check_bluechi_controller_is_ok(): bluechi-controller is not active."
+        exit 1
+    fi
+
+    regex_qm_localrootfs="qm.localrootfs * \| online"
+    regex_ASIL_localrootfs="localrootfs * \| online"
+    if [[ ! "$(bluechictl status)" =~ ${regex_qm_localrootfs} ]]; then
+        info_message "FAIL: check_bluechi_controller_is_ok: Checking QM bluechi-agent online failed.\n $(bluechictl status)"
+        exit 1
+    elif [[ ! "$(bluechictl status)" =~ ${regex_ASIL_localrootfs} ]]; then
+        info_message "FAIL: check_bluechi_controller_is_ok: Checking host bluechi-agent online failed.\n $(bluechictl status)"
+        exit 1
+    else
+        info_message "check_bluechi_controller_is_ok: QM bluechi-agent is online."
+        info_message "check_bluechi_controller_is_ok: host bluechi-agent is online."
+        info_message "PASS: check_bluechi_controller_is_ok()"
+        exit 0
+    fi
+}
+
+check_bluechi_controller_is_ok
\ No newline at end of file
diff --git a/tests/qm-sanity-test/check_bluechi_is_ok.fmf b/tests/qm-sanity-test/check_bluechi_is_ok.fmf
new file mode 100644
index 00000000..eb69517f
--- /dev/null
+++ b/tests/qm-sanity-test/check_bluechi_is_ok.fmf
@@ -0,0 +1,6 @@
+summary: Test host and QM bluechi-agent are connected to controller
+test: /bin/bash ./check_bluechi_is_ok.sh
+duration: 10m
+tier: 0
+framework: shell
+id: 627318fa-b7bb-4d95-ab64-524911398a88
diff --git a/tests/qm-sanity-test/check_bluechi_is_ok.sh b/tests/qm-sanity-test/check_bluechi_is_ok.sh
new file mode 100644
index 00000000..9d992f2b
--- /dev/null
+++ b/tests/qm-sanity-test/check_bluechi_is_ok.sh
@@ -0,0 +1,27 @@
+#!/bin/bash -euvx
+
+# shellcheck disable=SC1091
+source ../e2e/lib/utils
+
+
+# Verify bluechi nodes are connected
+check_bluechi_is_ok(){
+    bluechi_controller_status=$(systemctl status bluechi-controller | tail -2)
+    regex_ASIL_bluechi_agent="Registered managed node from fd [0-9]{1,2} as 'localrootfs'"
+    regex_QM_bluechi_agent="Registered managed node from fd [0-9]{1,2} as 'qm.localrootfs'"
+
+    if [[ ! "${bluechi_controller_status}" =~ ${regex_ASIL_bluechi_agent} ]]; then
+        info_message "FAIL: check_bluechi_is_ok: host bluechi-agent is not connected to controller.\n ${bluechi_controller_status}"
+        exit 1
+    elif [[ ! "${bluechi_controller_status}" =~ ${regex_QM_bluechi_agent} ]]; then
+        info_message "FAIL: check_bluechi_is_ok: QM bluechi-agent is not connected to controller.\n ${bluechi_controller_status}"
+        exit 1
+    else
+        info_message "check_bluechi_is_ok: host bluechi-agent is connected to controller."
+        info_message "check_bluechi_is_ok: QM bluechi-agent is connected to controller."
+        info_message "PASS: check_bluechi_is_ok()"
+        exit 0
+    fi
+}
+
+check_bluechi_is_ok
\ No newline at end of file
diff --git a/tests/qm-sanity-test/check_qm_is_up.fmf b/tests/qm-sanity-test/check_qm_is_up.fmf
new file mode 100644
index 00000000..473c05e7
--- /dev/null
+++ b/tests/qm-sanity-test/check_qm_is_up.fmf
@@ -0,0 +1,6 @@
+summary: Test qm is up and running
+test: /bin/bash ./check_qm_is_up.sh
+duration: 10m
+tier: 0
+framework: shell
+id: b82a5766-275b-4635-9c2a-7ab3d8c6dc05
diff --git a/tests/qm-sanity-test/check_qm_is_up.sh b/tests/qm-sanity-test/check_qm_is_up.sh
new file mode 100644
index 00000000..59cd6dd4
--- /dev/null
+++ b/tests/qm-sanity-test/check_qm_is_up.sh
@@ -0,0 +1,18 @@
+#!/bin/bash -euvx
+
+# shellcheck disable=SC1091
+source ../e2e/lib/utils
+
+# Verify qm is up and running
+check_qm_is_up(){
+    if [ "$(systemctl is-active qm)" == "active" ]; then
+        info_message "check_qm_is_up(): qm is active"
+        info_message "PASS: check_qm_is_up()"
+        exit 0
+    else
+        info_message "FAIL: check_qm_is_up(): qm is not active"
+        exit 1
+    fi
+}
+
+check_qm_is_up
\ No newline at end of file
diff --git a/tests/qm-sanity-test/check_qm_podman_is_ok.fmf b/tests/qm-sanity-test/check_qm_podman_is_ok.fmf
new file mode 100644
index 00000000..1279cdd6
--- /dev/null
+++ b/tests/qm-sanity-test/check_qm_podman_is_ok.fmf
@@ -0,0 +1,6 @@
+summary: Test podman in qm is ok
+test: /bin/bash ./check_qm_podman_is_ok.sh
+duration: 10m
+tier: 0
+framework: shell
+id: ac4aac07-094b-4d22-b5f6-b85bc39da119
diff --git a/tests/qm-sanity-test/check_qm_podman_is_ok.sh b/tests/qm-sanity-test/check_qm_podman_is_ok.sh
new file mode 100644
index 00000000..5a0462e7
--- /dev/null
+++ b/tests/qm-sanity-test/check_qm_podman_is_ok.sh
@@ -0,0 +1,18 @@
+#!/bin/bash -euvx
+
+# shellcheck disable=SC1091
+source ../e2e/lib/utils
+
+# Verify podman in qm is ok
+check_qm_podman_is_ok(){
+    if podman exec qm bash -c "podman info" > /dev/null; then
+        info_message "check_qm_podman_is_ok(): 'podman info' in qm succeeded."
+        info_message "PASS: check_qm_podman_is_ok()"
+        exit 0
+    else
+        info_message "FAIL: check_qm_podman_is_ok(): 'podman info' in qm failed.\n $(podman exec qm bash -c "podman info")"
+        exit 1
+    fi
+}
+
+check_qm_podman_is_ok
\ No newline at end of file
diff --git a/tests/qm-sanity-test/check_qm_podman_quadlet_is_ok.fmf b/tests/qm-sanity-test/check_qm_podman_quadlet_is_ok.fmf
new file mode 100644
index 00000000..c6c62d03
--- /dev/null
+++ b/tests/qm-sanity-test/check_qm_podman_quadlet_is_ok.fmf
@@ -0,0 +1,6 @@
+summary: Test podman can run and exec a container in qm via a service file
+test: /bin/bash ./check_qm_podman_quadlet_is_ok.sh
+duration: 10m
+tier: 0
+framework: shell
+id: b2071703-f9d4-4945-aa2d-0cf3abeaecb7
diff --git a/tests/qm-sanity-test/check_qm_podman_quadlet_is_ok.sh b/tests/qm-sanity-test/check_qm_podman_quadlet_is_ok.sh
new file mode 100644
index 00000000..01ba58c3
--- /dev/null
+++ b/tests/qm-sanity-test/check_qm_podman_quadlet_is_ok.sh
@@ -0,0 +1,40 @@
+#!/bin/bash -euvx
+
+# shellcheck disable=SC1091
+source ../e2e/lib/utils
+
+# Verify podman run and exec container inside qm with service file
+check_qm_podman_quadlet_is_ok(){
+    info_message "check_qm_podman_quadlet_is_ok(): \
+    prepare quadlet files for qm-sanity-test.container"
+    cat > "/etc/qm/containers/systemd/qm-sanity-test.container" < /dev/null 2>&1; then
+        expected_var_partition="part /var"
+    # /var on c9s image
+    else
+        expected_var_partition="part /usr/lib/qm/rootfs/var"
+    fi
+
+    if [[ "$(lsblk -o 'MAJ:MIN,TYPE,MOUNTPOINTS')" =~ ${expected_var_partition} ]]; then
+        info_message "check_var_partition_exist: /var exists in QM"
+        info_message "PASS: check_var_partition_exist()"
+        exit 0
+    else
+        info_message "FAIL: check_var_partition_exist: /var does not exist in QM"
+        exit 1
+    fi
+}
+
+check_var_partition_exist
\ No newline at end of file
diff --git a/tests/qm-sanity-test/main.fmf b/tests/qm-sanity-test/main.fmf
new file mode 100644
index 00000000..12349dd1
--- /dev/null
+++ b/tests/qm-sanity-test/main.fmf
@@ -0,0 +1 @@
+summary: QM sanity test
diff --git a/tests/qm-unit-files/main.fmf b/tests/qm-unit-files/main.fmf
index 4c40107d..93ecab6b 100644
--- a/tests/qm-unit-files/main.fmf
+++ b/tests/qm-unit-files/main.fmf
@@ -1,9 +1,9 @@
-/tier0:
+/multi-bluechi-agents:
     environment:
         CONTROL_CONTAINER_NAME: node1
     summary: Test is calling e2e/lib/tests as stand alone test
     test: ./test.sh
-    tier: 0
+    tag: multi-bluechi-agents
     id: 68e8a9a8-a9f8-4c43-be58-71645d09bc5c
 /tier1:
     summary: Test is testing AutoSD container

From 3792942f43ada92e3c5fa3b6114137d8ea859a88 Mon Sep 17 00:00:00 2001
From: pengshanyu
Date: Wed, 24 Jul 2024 11:43:14 +0800
Subject: [PATCH 2/3] remove PACKIT_COPR_PROJECT

Signed-off-by: pengshanyu
---
 plans/main.fmf | 2 --
 1 file changed, 2 deletions(-)

diff --git a/plans/main.fmf b/plans/main.fmf
index 7a249bf3..ab3f82f7 100644
--- a/plans/main.fmf
+++ b/plans/main.fmf
@@ -2,8 +2,6 @@ summary: general data used by the test plans
 
 environment:
     FFI_SETUP_OPTIONS: none
-    PACKAGE: qm
-    PACKIT_COPR_PROJECT: release
 
 prepare:
     - name: Install podman

From f1ef96872c757b90dbb7ba7c069ea38ea379bef4 Mon Sep 17 00:00:00 2001
From: pengshanyu
Date: Wed, 24 Jul 2024 16:22:41 +0800
Subject: [PATCH 3/3] remove skip_build

Signed-off-by: pengshanyu
---
 .packit.yaml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.packit.yaml b/.packit.yaml
index 51b9af1d..4e13e190 100644
--- a/.packit.yaml
+++ b/.packit.yaml
@@ -62,7 +62,6 @@ jobs:
     tmt_plan: /plans/e2e/multi-bluechi-agents
     targets:
     - centos-stream-9-x86_64
-    skip_build: true
    manual_trigger: true
    tf_extra_params:
      environments:
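
The plans and checks added in this series can be exercised locally before packit picks them up in CI. A minimal sketch, assuming tmt and podman are installed on a disposable CentOS Stream 9 machine and the repository is checked out; the plan and script paths come from the patches above, while the provision override and use of sudo are illustrative:

    # Run the whole tier-0 sanity plan on the local host
    sudo tmt run --all provision --how local plans --name /plans/e2e/tier-0

    # Or, once the QM environment has been set up, run a single sanity check directly
    cd tests/qm-sanity-test
    sudo bash ./check_qm_is_up.sh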