Revert "[EBPF] Generate complexity data in CI (#26723)" (#27263)
brycekahle authored Jul 2, 2024
1 parent 69c6234 commit 3628de0
Showing 6 changed files with 11 additions and 99 deletions.
23 changes: 2 additions & 21 deletions .gitlab/kernel_matrix_testing/common.yml
@@ -48,16 +48,6 @@
- $CI_PROJECT_DIR/tools/ci/aws_ssm_get_wrapper.sh $AGENT_QA_PROFILE_SSM_NAME >> ~/.aws/config
- export AWS_PROFILE=agent-qa-ci

.define_if_collect_complexity:
# These platforms do not support complexity collection due to old kernel versions, exclude them
# so that we can monitor real collection failures with logs/CI tags.
- UNSUPPORTED_COMPLEXITY_PLATFORMS="suse_12.5 amazon_4.14 centos_7.9 debian_9 ubuntu_16.04"
- |
if ( [ "${TEST_SET}" = "no_usm" ] || [ "${TEST_SET}" = "all_tests" ] ) && ! echo "${UNSUPPORTED_COMPLEXITY_PLATFORMS}" | grep -qw "${TAG}" ; then
export COLLECT_COMPLEXITY=yes
fi
- echo "COLLECT_COMPLEXITY=${COLLECT_COMPLEXITY}"

.collect_outcomes_kmt:
- export DD_API_KEY=$($CI_PROJECT_DIR/tools/ci/aws_ssm_get_wrapper.sh $API_KEY_ORG2_SSM_NAME)
- export MICRO_VM_IP=$(jq --exit-status --arg TAG $TAG --arg ARCH $ARCH --arg TEST_SET $TEST_SET -r '.[$ARCH].microvms | map(select(."vmset-tags"| index($TEST_SET))) | map(select(.tag==$TAG)) | .[].ip' $CI_PROJECT_DIR/stack.output)
@@ -73,13 +63,6 @@
- scp "metal_instance:/home/ubuntu/testjson-${ARCH}-${TAG}-${TEST_SET}.tar.gz" $DD_AGENT_TESTING_DIR/ || true
- ssh metal_instance "scp -r ${MICRO_VM_IP}:/tmp/test_pcaps /home/ubuntu/test_pcaps-${ARCH}-${TAG}-${TEST_SET}" || true
- mkdir -p "$CI_PROJECT_DIR/pcaps" && scp -r "metal_instance:/home/ubuntu/test_pcaps-${ARCH}-${TAG}-${TEST_SET}" "$CI_PROJECT_DIR/pcaps/test_pcaps-${ARCH}-${TAG}-${TEST_SET}" || true
# Retrieve complexity data
- !reference [.define_if_collect_complexity]
- |
if [ "${COLLECT_COMPLEXITY}" = "yes" ]; then
ssh metal_instance "scp ${MICRO_VM_IP}:/verifier-complexity.tar.gz /home/ubuntu/verifier-complexity-${ARCH}-${TAG}-${TEST_COMPONENT}.tar.gz" || true
scp "metal_instance:/home/ubuntu/verifier-complexity-${ARCH}-${TAG}-${TEST_COMPONENT}.tar.gz" $DD_AGENT_TESTING_DIR/ || true
fi
- !reference [.tag_kmt_ci_job]

.upload_junit_kmt:
@@ -206,7 +189,7 @@
# will not run if the dependencies are canceled
.kmt_cleanup_manual:
when: manual
allow_failure: true # Don't fail the full pipeline, these can fail if the instances are already cleaned up
allow_failure: true # Don't fail the full pipeline, these can fail if the instances are already cleaned up
needs: []

# -- Test runners
@@ -235,7 +218,6 @@
- echo "CI_JOB_NAME=${CI_JOB_NAME}" >> $DD_AGENT_TESTING_DIR/job_env.txt
- echo "CI_JOB_STAGE=${CI_JOB_STAGE}" >> $DD_AGENT_TESTING_DIR/job_env.txt
- inv -e gitlab.generate-ci-visibility-links --output=$EXTERNAL_LINKS_PATH
- !reference [.define_if_collect_complexity]
script:
- INSTANCE_IP=$(jq --exit-status --arg ARCH $ARCH -r '.[$ARCH].ip' $CI_PROJECT_DIR/stack.output)
- !reference [.shared_filters_and_queries]
@@ -252,7 +234,7 @@
# ssh into each micro-vm and run initialization script. This script will also run the tests.
- scp "$DD_AGENT_TESTING_DIR/job_env.txt" "metal_instance:/home/ubuntu/job_env-${ARCH}-${TAG}-${TEST_SET}.txt"
- ssh metal_instance "scp /home/ubuntu/job_env-${ARCH}-${TAG}-${TEST_SET}.txt ${MICRO_VM_IP}:/job_env.txt"
- NESTED_VM_CMD="/home/ubuntu/connector -host ${MICRO_VM_IP} -user root -ssh-file /home/kernel-version-testing/ddvm_rsa -vm-cmd 'CI=true /root/fetch_dependencies.sh ${ARCH} && COLLECT_COMPLEXITY=${COLLECT_COMPLEXITY} /opt/micro-vm-init.sh -test-tools /opt/testing-tools -retry ${RETRY} -test-root /opt/${TEST_COMPONENT}-tests -packages-run-config /opt/${TEST_SET}.json'"
- NESTED_VM_CMD="/home/ubuntu/connector -host ${MICRO_VM_IP} -user root -ssh-file /home/kernel-version-testing/ddvm_rsa -vm-cmd 'CI=true /root/fetch_dependencies.sh ${ARCH} && /opt/micro-vm-init.sh -test-tools /opt/testing-tools -retry ${RETRY} -test-root /opt/${TEST_COMPONENT}-tests -packages-run-config /opt/${TEST_SET}.json'"
- $CI_PROJECT_DIR/connector-$ARCH -host $INSTANCE_IP -user ubuntu -ssh-file $AWS_EC2_SSH_KEY_FILE -vm-cmd "${NESTED_VM_CMD}"
- ssh metal_instance "ssh ${MICRO_VM_IP} '/opt/testing-tools/test-json-review -flakes /opt/testing-tools/flakes.yaml'"
artifacts:
@@ -261,7 +243,6 @@
paths:
- $DD_AGENT_TESTING_DIR/junit-$ARCH-$TAG-$TEST_SET.tar.gz
- $DD_AGENT_TESTING_DIR/testjson-$ARCH-$TAG-$TEST_SET.tar.gz
- $DD_AGENT_TESTING_DIR/verifier-complexity-$ARCH-$TAG-${TEST_COMPONENT}.tar.gz
- $CI_PROJECT_DIR/logs
- $CI_PROJECT_DIR/pcaps
reports:
12 changes: 0 additions & 12 deletions pkg/ebpf/verifier/stats.go
@@ -16,7 +16,6 @@ import (
"path/filepath"
"reflect"
"regexp"
"runtime/debug"
"strconv"
"strings"

@@ -235,21 +234,10 @@ func generateLoadFunction(file string, opts *StatsOptions, results *StatsResult,
}
results.Complexity[progName] = compl
}

// Set to empty string to prevent the GC from keeping the verifier log in memory
p.VerifierLog = ""
default:
return fmt.Errorf("Unexpected type %T", field)
}
}

// After each program, force Go to release as much memory as possible
// With line-complexity enabled, each program allocates a 1GB buffer for the
// verifier log, which means that the memory footprint of the program can get
// quite large before the garbage collector kicks in and releases memory to the OS.
// This causes out-of-memory errors, especially in CI, which is an environment with stricter
// memory restrictions and multiple programs running in different VMs.
debug.FreeOSMemory()
}

return nil
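
The deleted cleanup relied on runtime/debug.FreeOSMemory to return freed pages to the OS between programs. A minimal standalone sketch of that pattern, for context only (the buffer size and loop are placeholders, not the agent's code):

package main

import "runtime/debug"

// processProgram stands in for loading one eBPF program with line-complexity
// enabled, which allocated a large buffer for the verifier log.
func processProgram() {
	buf := make([]byte, 256<<20) // placeholder for the ~1GB verifier log buffer
	_ = buf[0]
}

func main() {
	for i := 0; i < 4; i++ {
		processProgram()
		// Eagerly hand freed memory back to the OS instead of waiting for the
		// next GC cycle, keeping the process footprint low between programs.
		debug.FreeOSMemory()
	}
}

This mirrors the intent of the deleted comment: keep the resident set small when many large, short-lived allocations happen back to back.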
12 changes: 6 additions & 6 deletions pkg/ebpf/verifier/verifier_log_parser.go
@@ -17,7 +17,7 @@ var (
insnRegex = regexp.MustCompile(`^([0-9]+): \([0-9a-f]+\) ([^;]*)\s*(; R[0-9]+.*)?`)
regStateRegex = regexp.MustCompile(`^([0-9]+): (R[0-9]+.*)`)
singleRegStateRegex = regexp.MustCompile(`R([0-9]+)(_[^=]+)?=([^ ]+)`)
regInfoRegex = regexp.MustCompile(`^(P)?([a-z_]+)?(P)?(-?[0-9]+|\((.*)\))`)
regInfoRegex = regexp.MustCompile(`^([a-z_]+)?(P)?(-?[0-9]+|\((.*)\))`)
)
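
For context, a minimal sketch (not part of this diff) of how the restored regInfoRegex groups map onto a register value; the sample string is hypothetical and only shaped like a verifier-log fragment:

package main

import (
	"fmt"
	"regexp"
)

// Same pattern as the restored regInfoRegex: optional lowercase type, optional
// precise marker "P" after the type, then a raw number or a parenthesized
// attribute list.
var regInfoRegex = regexp.MustCompile(`^([a-z_]+)?(P)?(-?[0-9]+|\((.*)\))`)

func main() {
	groups := regInfoRegex.FindStringSubmatch("scalarP(umin=0,umax=60)")
	fmt.Println(groups[1]) // type:       "scalar"
	fmt.Println(groups[2]) // precise:    "P"
	fmt.Println(groups[3]) // raw value:  "(umin=0,umax=60)"
	fmt.Println(groups[4]) // attributes: "umin=0,umax=60"
}

A value with the marker before the type, such as the Pscalar(...) input in the test case removed further down, no longer matches this pattern at all, which is presumably why that case was dropped together with the dual-position Precise check in parseRegisterState.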

// verifierLogParser is a struct that maintains the state necessary to parse the verifier log
@@ -204,7 +204,7 @@ func parseRegisterState(regMatch []string) (*RegisterState, error) {
return nil, fmt.Errorf("Cannot parse register value %v", regValue)
}

regType := regInfoGroups[2]
regType := regInfoGroups[1]
if regType == "inv" || regType == "" {
// Depending on the kernel version, we might see scalars represented either
// as "scalar" type, as "inv" type or as a raw number with no type
@@ -224,16 +224,16 @@ func parseRegisterState(regMatch []string) (*RegisterState, error) {
Live: liveness,
Type: regType,
Value: regValue,
Precise: regInfoGroups[1] == "P" || regInfoGroups[3] == "P", // depending on the kernel version, the precise marker might be before or after the type
Precise: regInfoGroups[2] == "P",
}, nil
}

// parseRegisterScalarValue parses the scalar value from the register state match and returns a
// human-readable value.
func parseRegisterScalarValue(regInfoGroups []string) string {
// Scalar values are either a raw numeric value or a list of key-value pairs within parentheses
regRawValue := regInfoGroups[4]
regAttributes := regInfoGroups[5]
regRawValue := regInfoGroups[3]
regAttributes := regInfoGroups[4]

if regAttributes == "" {
if regRawValue == "()" {
Expand All @@ -247,7 +247,7 @@ func parseRegisterScalarValue(regInfoGroups []string) string {
maxValue := int64(0)
hasRange := false

for _, kv := range strings.Split(regInfoGroups[5], ",") {
for _, kv := range strings.Split(regInfoGroups[4], ",") {
kvParts := strings.Split(kv, "=")
if strings.Contains(kvParts[0], "min") {
// Ignore errors here, mostly due to sizes (can't parse UINT_MAX in INT64) and for now we don't care
11 changes: 0 additions & 11 deletions pkg/ebpf/verifier/verifier_log_parser_test.go
@@ -52,17 +52,6 @@ func TestParseRegisterState(t *testing.T) {
Precise: false,
},
},
{
name: "PreciseScalarWithValue",
input: "R1_w=Pscalar(umax=60,var_off=(0x0;0x3c))",
expected: &RegisterState{
Register: 1,
Live: "written",
Type: "scalar",
Value: "[0, 60 (0x3C)]",
Precise: true,
},
},
}

for _, tt := range tests {
26 changes: 0 additions & 26 deletions tasks/kmt.py
@@ -510,21 +510,6 @@ def ninja_build_dependencies(ctx: Context, nw: NinjaWriter, kmt_paths: KMTPaths,
variables={"mode": "-m744"},
)

verifier_files = glob("pkg/ebpf/verifier/**/*.go")
nw.build(
rule="gobin",
pool="gobuild",
inputs=["./pkg/ebpf/verifier/calculator/main.go"],
outputs=[os.fspath(kmt_paths.dependencies / "verifier-calculator")],
implicit=verifier_files,
variables={
"go": go_path,
"chdir": "true",
"env": env_str,
"tags": f"-tags=\"{','.join(get_sysprobe_buildtags(False, False))}\"",
},
)


def ninja_copy_ebpf_files(
nw: NinjaWriter,
@@ -1864,17 +1849,6 @@ def tag_ci_job(ctx: Context):
tags["failure_reason"] = "infra_ssh-config"
else:
tags["failure_reason"] = "infra-unknown"

# Tag complexity results
should_collect_complexity = os.getenv("COLLECT_COMPLEXITY") == "yes"
collected_complexity = any(agent_testing_dir.glob("verifier-complexity-*.tar.gz"))

if not should_collect_complexity:
tags["complexity_collection"] = "skipped"
elif collected_complexity:
tags["complexity_collection"] = "success"
else:
tags["complexity_collection"] = "failure"
elif job_type == "setup":
if "kmt_setup_env" in job_name:
tags["setup_stage"] = "infra-provision"
26 changes: 3 additions & 23 deletions test/new-e2e/system-probe/test/micro-vm-init.sh
@@ -1,13 +1,14 @@
#!/bin/bash
set -eEuo pipefail

runner_config=$@
docker_dir=/kmt-dockers

# Add provisioning steps here !
## Start docker if available, some images (e.g. SUSE arm64 for CWS) do not have it installed
if command -v docker ; then
systemctl start docker

## Load docker images
if [[ -d "${docker_dir}" ]]; then
find "${docker_dir}" -maxdepth 1 -type f -exec docker load -i {} \;
@@ -20,7 +21,7 @@ fi
# Start tests
code=0

/opt/testing-tools/test-runner "$@" || code=$?
/opt/testing-tools/test-runner $runner_config || code=$?

if [[ -f "/job_env.txt" ]]; then
cp /job_env.txt /ci-visibility/junit/
@@ -31,25 +32,4 @@ fi
tar -C /ci-visibility/testjson -czvf /ci-visibility/testjson.tar.gz .
tar -C /ci-visibility/junit -czvf /ci-visibility/junit.tar.gz .

if [ "${COLLECT_COMPLEXITY:-}" = "yes" ]; then
echo "Collecting complexity data..."
mkdir -p /verifier-complexity
arch=$(uname -m)
if [[ "${arch}" == "aarch64" ]]; then
arch="arm64"
fi

test_root=$(echo "$@" | sed 's/.*-test-root \([^ ]*\).*/\1/')
export DD_SYSTEM_PROBE_BPF_DIR="${test_root}/pkg/ebpf/bytecode/build/${arch}"

if /opt/testing-tools/verifier-calculator -line-complexity -complexity-data-dir /verifier-complexity/complexity-data -summary-output /verifier-complexity/verifier_stats.json &> /verifier-complexity/calculator.log ; then
echo "Data collected, creating tarball at /verifier-complexity.tar.gz"
tar -C /verifier-complexity -czf /verifier-complexity.tar.gz . || echo "Failed to create verifier-complexity.tar.gz"
else
echo "Failed to collect complexity data"
echo "Calculator log:"
cat /verifier-complexity/calculator.log
fi
fi

exit ${code}
