diff --git a/.github/test-scripts/setup_pulp.sh b/.github/test-scripts/setup_pulp.sh index 17c617f7..6a8839ab 100755 --- a/.github/test-scripts/setup_pulp.sh +++ b/.github/test-scripts/setup_pulp.sh @@ -111,7 +111,7 @@ export XDG_RUNTIME_DIR=/tmp/pulptests mkdir $XDG_RUNTIME_DIR skopeo login --username admin --password password localhost:8080 --tls-verify=false -skopeo copy docker://registry.access.redhat.com/ubi9/ubi-micro:latest docker://localhost:8080/testrepo/ubi-micro --dest-tls-verify=false +skopeo copy docker://registry.access.redhat.com/ubi9/ubi-minimal:latest docker://localhost:8080/testrepo/ubi-minimal --dest-tls-verify=false podman login --username "$1" --password "$2" registry.redhat.io skopeo copy docker://registry.redhat.io/ansible-automation-platform-21/ansible-builder-rhel8:latest docker://localhost:8080/testrepo/ansible-builder-rhel8 --dest-tls-verify=false diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e610ecbe..3c137a38 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -8,7 +8,7 @@ on: jobs: sanity: name: ${{ matrix.test.name }} - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 container: image: quay.io/ansible/ansible-builder-test-container:2.0.0 env: @@ -20,7 +20,7 @@ jobs: matrix: test: - name: Lint - tox_env: linters + tox_env: linters-py39 - name: Docs tox_env: docs @@ -36,7 +36,7 @@ jobs: run: tox secrets_preflight: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 name: Secrets pre-flight check env: secret_user: ${{ secrets.RH_REGISTRY_USER }} @@ -51,7 +51,7 @@ jobs: pulp_integration: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 name: Pulp Integration - ${{ matrix.py_version.name }} # NB: running this job requires access to an RH registry token; PRs can't currently access the main repo secret, # so forks will need to define the secrets locally to run these tests pre-merge @@ -103,7 +103,7 @@ jobs: integration: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 name: Integration - ${{ 
matrix.py_version.name }} env: @@ -113,9 +113,6 @@ jobs: fail-fast: false matrix: py_version: - - name: '3.8' - tox_env: integration-py38 - - name: '3.9' tox_env: integration-py39 @@ -156,7 +153,7 @@ jobs: unit: name: Unit - ${{ matrix.py_version.name}} - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 container: image: quay.io/ansible/ansible-builder-test-container:2.0.0 env: @@ -167,9 +164,6 @@ jobs: fail-fast: false matrix: py_version: - - name: '3.8' - tox_env: unit-py38 - - name: '3.9' tox_env: unit-py39 diff --git a/Containerfile b/Containerfile index 8e1df379..776d9c59 100644 --- a/Containerfile +++ b/Containerfile @@ -1,20 +1,19 @@ -ARG PYTHON_BASE_IMAGE=quay.io/ansible/python-base:latest -ARG PYTHON_BUILDER_IMAGE=quay.io/ansible/python-builder:latest - -FROM $PYTHON_BUILDER_IMAGE as builder -# ============================================================================= -ARG ZUUL_SIBLINGS +ARG BASE_IMAGE=quay.io/centos/centos:stream9 +FROM $BASE_IMAGE as builder # build this library (meaning ansible-builder) COPY . 
/tmp/src -RUN assemble -FROM $PYTHON_BASE_IMAGE -# ============================================================================= +COPY ./ansible_builder/_target_scripts/* /output/scripts/ +RUN python3 -m ensurepip +RUN python3 -m pip install --upgrade pip +RUN python3 -m pip install --no-cache-dir bindep wheel +RUN /output/scripts/assemble +FROM $BASE_IMAGE COPY --from=builder /output/ /output # building EEs require the install-from-bindep script, but not the rest of the /output folder -RUN /output/install-from-bindep && find /output/* -not -name install-from-bindep -exec rm -rf {} + +RUN /output/scripts/install-from-bindep && find /output/* -not -name install-from-bindep -exec rm -rf {} + -# move the assemble scripts themselves into this container -COPY --from=builder /usr/local/bin/assemble /usr/local/bin/assemble -COPY --from=builder /usr/local/bin/get-extras-packages /usr/local/bin/get-extras-packages +# copy the assemble scripts themselves into this container +COPY ./ansible_builder/_target_scripts/assemble /usr/local/bin/assemble +COPY ./ansible_builder/_target_scripts/get-extras-packages /usr/local/bin/get-extras-packages diff --git a/ansible_builder/__main__.py b/ansible_builder/__main__.py new file mode 100644 index 00000000..176e5692 --- /dev/null +++ b/ansible_builder/__main__.py @@ -0,0 +1,4 @@ +from . import cli + +if __name__ == '__main__': + cli.run() diff --git a/ansible_builder/_target_scripts/__init__.py b/ansible_builder/_target_scripts/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ansible_builder/_target_scripts/assemble b/ansible_builder/_target_scripts/assemble new file mode 100755 index 00000000..97069042 --- /dev/null +++ b/ansible_builder/_target_scripts/assemble @@ -0,0 +1,178 @@ +#!/bin/bash +# Copyright (c) 2019 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Make a list of bindep dependencies and a collection of built binary +# wheels for the repo in question as well as its python dependencies. +# Install javascript tools as well to support python that needs javascript +# at build time. +set -ex + +RELEASE=$(source /etc/os-release; echo $ID) + +# NOTE(pabelanger): Allow users to force either microdnf or dnf as a package +# manager. +PKGMGR="${PKGMGR:-}" +PKGMGR_OPTS="${PKGMGR_OPTS:-}" + +PYCMD="${PYCMD:=/usr/bin/python3}" +PIPCMD="${PIPCMD:=$PYCMD -m pip}" + +$PYCMD -m ensurepip + +if [ -z $PKGMGR ]; then + # Expect dnf to be installed, however if we find microdnf default to it. + PKGMGR=/usr/bin/dnf + if [ -f "/usr/bin/microdnf" ]; then + PKGMGR=/usr/bin/microdnf + fi +fi + +if [ "$PKGMGR" = "/usr/bin/microdnf" ] +then + if [ -z $PKGMGR_OPTS ]; then + # NOTE(pabelanger): skip install docs and weak dependencies to + # make smaller images. Sadly, setting these in dnf.conf don't + # appear to work. + PKGMGR_OPTS="--nodocs --setopt install_weak_deps=0" + fi +fi + +# NOTE(pabelanger): Ensure all the direcotry we use exists regardless +# of the user first creating them or not. +mkdir -p /output/bindep +mkdir -p /output/wheels +mkdir -p /tmp/src + +cd /tmp/src + +$PKGMGR upgrade -y + +function install_bindep { + # Protect from the bindep builder image use of the assemble script + # to produce a wheel. 
Note we append because we want all + # sibling packages in here too + if [ -f bindep.txt ] ; then + bindep -l newline | sort >> /output/bindep/run.txt || true + if [ "$RELEASE" == "centos" ] ; then + bindep -l newline -b epel | sort >> /output/bindep/stage.txt || true + grep -Fxvf /output/bindep/run.txt /output/bindep/stage.txt >> /output/bindep/epel.txt || true + rm -rf /output/bindep/stage.txt + fi + compile_packages=$(bindep -b compile || true) + if [ ! -z "$compile_packages" ] ; then + $PKGMGR install -y $PKGMGR_OPTS ${compile_packages} + fi + fi +} + +function install_wheels { + # NOTE(pabelanger): If there are build requirements to install, do so. + # However do not cache them as we do not want them in the final image. + if [ -f /tmp/src/build-requirements.txt ] && [ ! -f /tmp/src/.build-requirements.txt ] ; then + $PIPCMD install $CONSTRAINTS $PIP_OPTS --no-cache -r /tmp/src/build-requirements.txt + touch /tmp/src/.build-requirements.txt + fi + # Build a wheel so that we have an install target. + # pip install . in the container context with the mounted + # source dir gets ... exciting, if setup.py exists. + # We run sdist first to trigger code generation steps such + # as are found in zuul, since the sequencing otherwise + # happens in a way that makes wheel content copying unhappy. + # pip wheel isn't used here because it puts all of the output + # in the output dir and not the wheel cache, so it's not + # possible to tell what is the wheel for the project and + # what is the wheel cache. + if [ -f setup.py ] ; then + $PYCMD setup.py sdist bdist_wheel -d /output/wheels + fi + + # Install everything so that the wheel cache is populated with + # transitive depends. If a requirements.txt file exists, install + # it directly so that people can use git url syntax to do things + # like pick up patched but unreleased versions of dependencies. + # Only do this for the main package (i.e. only write requirements + # once). 
+ if [ -f /tmp/src/requirements.txt ] && [ ! -f /output/requirements.txt ] ; then + $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels -r /tmp/src/requirements.txt + cp /tmp/src/requirements.txt /output/requirements.txt + fi + # If we didn't build wheels, we can skip trying to install it. + if [ $(ls -1 /output/wheels/*whl 2>/dev/null | wc -l) -gt 0 ]; then + $PIPCMD uninstall -y /output/wheels/*.whl + $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels /output/wheels/*whl + fi + + # Install each of the extras so that we collect all possibly + # needed wheels in the wheel cache. get-extras-packages also + # writes out the req files into /output/$extra/requirements.txt. + + # FIXME: this doesn't error out the build when it fails, yay + for req in $(/build/get-extras-packages) ; do + $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels "$req" + done +} + +PACKAGES=$* +PIP_OPTS="${PIP_OPTS-}" + +# bindep the main package +install_bindep + +# go through ZUUL_SIBLINGS, if any, and build those wheels too +for sibling in ${ZUUL_SIBLINGS:-}; do + pushd .zuul-siblings/${sibling} + install_bindep + popd +done + +# Use a clean virtualenv for install steps to prevent things from the +# current environment making us not build a wheel. +# NOTE(pabelanger): We allow users to install distro python packages of +# libraries. This is important for projects that eventually want to produce +# an RPM or offline install. +$PYCMD -m venv /tmp/venv --system-site-packages --without-pip +source /tmp/venv/bin/activate + +# If there is an upper-constraints.txt file in the source tree, +# use it in the pip commands. +if [ -f /tmp/src/upper-constraints.txt ] ; then + cp /tmp/src/upper-constraints.txt /output/upper-constraints.txt + CONSTRAINTS="-c /tmp/src/upper-constraints.txt" +fi + +# If we got a list of packages, install them, otherwise install the +# main package. 
+if [[ $PACKAGES ]] ; then + $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels $PACKAGES + for package in $PACKAGES ; do + echo "$package" >> /output/packages.txt + done +else + install_wheels +fi + +# go through ZUUL_SIBLINGS, if any, and build those wheels too +for sibling in ${ZUUL_SIBLINGS:-}; do + pushd .zuul-siblings/${sibling} + install_wheels + popd +done + +$PKGMGR clean all +rm -rf /var/cache/{dnf,yum} +rm -rf /var/lib/dnf/history.* +rm -rf /var/log/{dnf.*,hawkey.log} +rm -rf /tmp/venv diff --git a/ansible_builder/_target_scripts/check_ansible b/ansible_builder/_target_scripts/check_ansible new file mode 100755 index 00000000..029be1f3 --- /dev/null +++ b/ansible_builder/_target_scripts/check_ansible @@ -0,0 +1,110 @@ +#!/bin/bash +# Copyright (c) 2023 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +##################################################################### +# Script to validate that Ansible and Ansible Runner are installed. +# +# Usage: check_ansible +# +# Options: +# PYCMD - The path to the python executable to use. +##################################################################### + +set -x + +PYCMD=$1 + +if [ -z "$PYCMD" ] +then + echo "Usage: check_ansible " + exit 1 +fi + +if [ ! -x "$PYCMD" ] +then + echo "$PYCMD is not an executable" + exit 1 +fi + +ansible --version + +if [ $? 
-ne 0 ] +then + cat< 1: + path = sys.argv[1] + print(get_extras_packages(path)) diff --git a/ansible_builder/_target_scripts/install-from-bindep b/ansible_builder/_target_scripts/install-from-bindep new file mode 100755 index 00000000..f3abae16 --- /dev/null +++ b/ansible_builder/_target_scripts/install-from-bindep @@ -0,0 +1,104 @@ +#!/bin/bash +# Copyright (c) 2019 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -ex +# NOTE(pabelanger): Allow users to force either microdnf or dnf as a package +# manager. +PKGMGR="${PKGMGR:-}" +PKGMGR_OPTS="${PKGMGR_OPTS:-}" + +PYCMD="${PYCMD:=/usr/bin/python3}" +PIPCMD="${PIPCMD:=$PYCMD -m pip}" + +$PYCMD -m ensurepip + +if [ -z $PKGMGR ]; then + # Expect dnf to be installed, however if we find microdnf default to it. + PKGMGR=/usr/bin/dnf + if [ -f "/usr/bin/microdnf" ]; then + PKGMGR=/usr/bin/microdnf + fi +fi + +if [ "$PKGMGR" = "/usr/bin/microdnf" ] +then + if [ -z $PKGMGR_OPTS ]; then + # NOTE(pabelanger): skip install docs and weak dependencies to + # make smaller images. Sadly, setting these in dnf.conf don't + # appear to work. + PKGMGR_OPTS="--nodocs --setopt install_weak_deps=0" + fi +fi + +$PKGMGR upgrade -y $PKGMGR_OPTS + +if [ -f /output/bindep/run.txt ] ; then + PACKAGES=$(cat /output/bindep/run.txt) + if [ ! -z "$PACKAGES" ]; then + $PKGMGR install -y $PKGMGR_OPTS $PACKAGES + fi +fi + +if [ -f /output/bindep/epel.txt ] ; then + EPEL_PACKAGES=$(cat /output/bindep/epel.txt) + if [ ! 
-z "$EPEL_PACKAGES" ]; then + $PKGMGR install -y $PKGMGR_OPTS --enablerepo epel $EPEL_PACKAGES + fi +fi + +# If there's a constraints file, use it. +if [ -f /output/upper-constraints.txt ] ; then + CONSTRAINTS="-c /output/upper-constraints.txt" +fi + +# If a requirements.txt file exists, +# install it directly so that people can use git url syntax +# to do things like pick up patched but unreleased versions +# of dependencies. +if [ -f /output/requirements.txt ] ; then + $PIPCMD install $CONSTRAINTS --cache-dir=/output/wheels -r /output/requirements.txt +fi + +# Add any requested extras to the list of things to install +EXTRAS="" +for extra in $* ; do + EXTRAS="${EXTRAS} -r /output/$extra/requirements.txt" +done + +if [ -f /output/packages.txt ] ; then + # If a package list was passed to assemble, install that in the final + # image. + $PIPCMD install $CONSTRAINTS --cache-dir=/output/wheels -r /output/packages.txt $EXTRAS +else + # Install the wheels. Uninstall any existing version as siblings maybe + # be built with the same version number as the latest release, but we + # really want the speculatively built wheels installed over any + # automatic dependencies. + # NOTE(pabelanger): It is possible a project may not have a wheel, but does have requirements.txt + if [ $(ls -1 /output/wheels/*whl 2>/dev/null | wc -l) -gt 0 ]; then + $PIPCMD uninstall -y /output/wheels/*.whl + $PIPCMD install $CONSTRAINTS --cache-dir=/output/wheels /output/wheels/*.whl $EXTRAS + elif [ ! 
-z "$EXTRAS" ] ; then + $PIPCMD uninstall -y $EXTRAS + $PIPCMD install $CONSTRAINTS --cache-dir=/output/wheels $EXTRAS + fi +fi + +# clean up after ourselves +$PKGMGR clean all +rm -rf /var/cache/{dnf,yum} +rm -rf /var/lib/dnf/history.* +rm -rf /var/log/{dnf.*,hawkey.log} diff --git a/ansible_builder/_target_scripts/introspect.py b/ansible_builder/_target_scripts/introspect.py new file mode 100644 index 00000000..5e30dc8c --- /dev/null +++ b/ansible_builder/_target_scripts/introspect.py @@ -0,0 +1,400 @@ +import argparse +import logging +import os +import sys +import yaml + +import requirements +from pkg_resources import safe_name + +base_collections_path = '/usr/share/ansible/collections' +default_file = 'execution-environment.yml' +logger = logging.getLogger(__name__) + + +def line_is_empty(line): + return bool((not line.strip()) or line.startswith('#')) + + +def read_req_file(path): + """Provide some minimal error and display handling for file reading""" + if not os.path.exists(path): + print('Expected requirements file not present at: {0}'.format(os.path.abspath(path))) + with open(path, 'r') as f: + return f.read() + + +def pip_file_data(path): + pip_content = read_req_file(path) + + pip_lines = [] + for line in pip_content.split('\n'): + if line_is_empty(line): + continue + if line.startswith('-r') or line.startswith('--requirement'): + _, new_filename = line.split(None, 1) + new_path = os.path.join(os.path.dirname(path or '.'), new_filename) + pip_lines.extend(pip_file_data(new_path)) + else: + pip_lines.append(line) + + return pip_lines + + +def bindep_file_data(path): + sys_content = read_req_file(path) + + sys_lines = [] + for line in sys_content.split('\n'): + if line_is_empty(line): + continue + sys_lines.append(line) + + return sys_lines + + +def process_collection(path): + """Return a tuple of (python_dependencies, system_dependencies) for the + collection install path given. + Both items returned are a list of dependencies. 
+ + :param str path: root directory of collection (this would contain galaxy.yml file) + """ + CD = CollectionDefinition(path) + + py_file = CD.get_dependency('python') + pip_lines = [] + if py_file: + pip_lines = pip_file_data(os.path.join(path, py_file)) + + sys_file = CD.get_dependency('system') + bindep_lines = [] + if sys_file: + bindep_lines = bindep_file_data(os.path.join(path, sys_file)) + + return (pip_lines, bindep_lines) + + +def process(data_dir=base_collections_path, user_pip=None, user_bindep=None): + paths = [] + path_root = os.path.join(data_dir, 'ansible_collections') + + # build a list of all the valid collection paths + if os.path.exists(path_root): + for namespace in sorted(os.listdir(path_root)): + if not os.path.isdir(os.path.join(path_root, namespace)): + continue + for name in sorted(os.listdir(os.path.join(path_root, namespace))): + collection_dir = os.path.join(path_root, namespace, name) + if not os.path.isdir(collection_dir): + continue + files_list = os.listdir(collection_dir) + if 'galaxy.yml' in files_list or 'MANIFEST.json' in files_list: + paths.append(collection_dir) + + # populate the requirements content + py_req = {} + sys_req = {} + for path in paths: + col_pip_lines, col_sys_lines = process_collection(path) + CD = CollectionDefinition(path) + namespace, name = CD.namespace_name() + key = '{}.{}'.format(namespace, name) + + if col_pip_lines: + py_req[key] = col_pip_lines + + if col_sys_lines: + sys_req[key] = col_sys_lines + + # add on entries from user files, if they are given + if user_pip: + col_pip_lines = pip_file_data(user_pip) + if col_pip_lines: + py_req['user'] = col_pip_lines + if user_bindep: + col_sys_lines = bindep_file_data(user_bindep) + if col_sys_lines: + sys_req['user'] = col_sys_lines + + return { + 'python': py_req, + 'system': sys_req + } + + +def has_content(candidate_file): + """Beyond checking that the candidate exists, this also assures + that the file has something other than whitespace, + which can 
cause errors when given to pip. + """ + if not os.path.exists(candidate_file): + return False + with open(candidate_file, 'r') as f: + content = f.read() + return bool(content.strip().strip('\n')) + + +class CollectionDefinition: + """This class represents the dependency metadata for a collection + should be replaced by logic to hit the Galaxy API if made available + """ + + def __init__(self, collection_path): + self.reference_path = collection_path + meta_file = os.path.join(collection_path, 'meta', default_file) + if os.path.exists(meta_file): + with open(meta_file, 'r') as f: + self.raw = yaml.safe_load(f) + else: + self.raw = {'version': 1, 'dependencies': {}} + # Automatically infer requirements for collection + for entry, filename in [('python', 'requirements.txt'), ('system', 'bindep.txt')]: + candidate_file = os.path.join(collection_path, filename) + if has_content(candidate_file): + self.raw['dependencies'][entry] = filename + + def target_dir(self): + namespace, name = self.namespace_name() + return os.path.join( + base_collections_path, 'ansible_collections', + namespace, name + ) + + def namespace_name(self): + "Returns 2-tuple of namespace and name" + path_parts = [p for p in self.reference_path.split(os.path.sep) if p] + return tuple(path_parts[-2:]) + + def get_dependency(self, entry): + """A collection is only allowed to reference a file by a relative path + which is relative to the collection root + """ + req_file = self.raw.get('dependencies', {}).get(entry) + if req_file is None: + return None + elif os.path.isabs(req_file): + raise RuntimeError( + 'Collections must specify relative paths for requirements files. 
' + 'The file {0} specified by {1} violates this.'.format( + req_file, self.reference_path + ) + ) + + return req_file + + +def simple_combine(reqs): + """Given a dictionary of requirement lines keyed off collections, + return a list with the most basic of de-duplication logic, + and comments indicating the sources based off the collection keys + """ + consolidated = [] + fancy_lines = [] + for collection, lines in reqs.items(): + for line in lines: + if line_is_empty(line): + continue + + base_line = line.split('#')[0].strip() + if base_line in consolidated: + i = consolidated.index(base_line) + fancy_lines[i] += ', {}'.format(collection) + else: + fancy_line = base_line + ' # from collection {}'.format(collection) + consolidated.append(base_line) + fancy_lines.append(fancy_line) + + return fancy_lines + + +def parse_args(args=sys.argv[1:]): + + parser = argparse.ArgumentParser( + prog='introspect', + description=( + 'ansible-builder introspection; injected and used during execution environment build' + ) + ) + + subparsers = parser.add_subparsers(help='The command to invoke.', dest='action') + subparsers.required = True + + create_introspect_parser(subparsers) + + args = parser.parse_args(args) + + return args + + +def run_introspect(args, logger): + data = process(args.folder, user_pip=args.user_pip, user_bindep=args.user_bindep) + if args.sanitize: + logger.info('# Sanitized dependencies for %s', args.folder) + data_for_write = data + data['python'] = sanitize_requirements(data['python']) + data['system'] = simple_combine(data['system']) + else: + logger.info('# Dependency data for %s', args.folder) + data_for_write = data.copy() + data_for_write['python'] = simple_combine(data['python']) + data_for_write['system'] = simple_combine(data['system']) + + print('---') + print(yaml.dump(data, default_flow_style=False)) + + if args.write_pip and data.get('python'): + write_file(args.write_pip, data_for_write.get('python') + ['']) + if args.write_bindep and 
data.get('system'): + write_file(args.write_bindep, data_for_write.get('system') + ['']) + + sys.exit(0) + + +def create_introspect_parser(parser): + introspect_parser = parser.add_parser( + 'introspect', + help='Introspects collections in folder.', + description=( + 'Loops over collections in folder and returns data about dependencies. ' + 'This is used internally and exposed here for verification. ' + 'This is targeted toward collection authors and maintainers.' + ) + ) + introspect_parser.add_argument('--sanitize', action='store_true', + help=('Sanitize and de-duplicate requirements. ' + 'This is normally done separately from the introspect script, but this ' + 'option is given to more accurately test collection content.')) + + introspect_parser.add_argument( + 'folder', default=base_collections_path, nargs='?', + help=( + 'Ansible collections path(s) to introspect. ' + 'This should have a folder named ansible_collections inside of it.' + ) + ) + # Combine user requirements and collection requirements into single file + # in the future, could look into passing multilple files to + # python-builder scripts to be fed multiple files as opposed to this + introspect_parser.add_argument( + '--user-pip', dest='user_pip', + help='An additional file to combine with collection pip requirements.' + ) + introspect_parser.add_argument( + '--user-bindep', dest='user_bindep', + help='An additional file to combine with collection bindep requirements.' + ) + introspect_parser.add_argument( + '--write-pip', dest='write_pip', + help='Write the combined pip requirements file to this location.' + ) + introspect_parser.add_argument( + '--write-bindep', dest='write_bindep', + help='Write the combined bindep requirements file to this location.' 
+ ) + + return introspect_parser + + +EXCLUDE_REQUIREMENTS = frozenset(( + # obviously already satisfied or unwanted + 'ansible', 'ansible-base', 'python', 'ansible-core', + # general python test requirements + 'tox', 'pycodestyle', 'yamllint', 'pylint', + 'flake8', 'pytest', 'pytest-xdist', 'coverage', 'mock', 'testinfra', + # test requirements highly specific to Ansible testing + 'ansible-lint', 'molecule', 'galaxy-importer', 'voluptuous', + # already present in image for py3 environments + 'yaml', 'pyyaml', 'json', +)) + + +def sanitize_requirements(collection_py_reqs): + """ + Cleanup Python requirements by removing duplicates and excluded packages. + + The user requirements file will go through the deduplication process, but + skips the special package exclusion process. + + :param dict collection_py_reqs: A dict of lists of Python requirements, keyed + by fully qualified collection name. The special key `user` holds requirements + from the user specified requirements file from the ``--user-pip`` CLI option. + + :returns: A finalized list of sanitized Python requirements. + """ + # de-duplication + consolidated = [] + seen_pkgs = set() + + for collection, lines in collection_py_reqs.items(): + try: + for req in requirements.parse('\n'.join(lines)): + if req.specifier: + req.name = safe_name(req.name) + req.collections = [collection] # add backref for later + if req.name is None: + consolidated.append(req) + continue + if req.name in seen_pkgs: + for prior_req in consolidated: + if req.name == prior_req.name: + prior_req.specs.extend(req.specs) + prior_req.collections.append(collection) + break + continue + consolidated.append(req) + seen_pkgs.add(req.name) + except Exception as e: + logger.warning('Warning: failed to parse requirements from %s, error: %s', collection, e) + + # removal of unwanted packages + sanitized = [] + for req in consolidated: + # Exclude packages, unless it was present in the user supplied requirements. 
+ if req.name and req.name.lower() in EXCLUDE_REQUIREMENTS and 'user' not in req.collections: + logger.debug('# Excluding requirement %s from %s', req.name, req.collections) + continue + if req.vcs or req.uri: + # Requirement like git+ or http return as-is + new_line = req.line + elif req.name: + specs = ['{0}{1}'.format(cmp, ver) for cmp, ver in req.specs] + new_line = req.name + ','.join(specs) + else: + raise RuntimeError('Could not process {0}'.format(req.line)) + + sanitized.append(new_line + ' # from collection {}'.format(','.join(req.collections))) + + return sanitized + + +def write_file(filename: str, lines: list) -> bool: + parent_dir = os.path.dirname(filename) + if parent_dir and not os.path.exists(parent_dir): + logger.warning('Creating parent directory for %s', filename) + os.makedirs(parent_dir) + new_text = '\n'.join(lines) + if os.path.exists(filename): + with open(filename, 'r') as f: + if f.read() == new_text: + logger.debug("File %s is already up-to-date.", filename) + return False + else: + logger.warning('File %s had modifications and will be rewritten', filename) + with open(filename, 'w') as f: + f.write(new_text) + return True + + +def main(): + args = parse_args() + + if args.action == 'introspect': + run_introspect(args, logger) + + logger.error("An error has occured.") + sys.exit(1) + + +if __name__ == '__main__': + main() diff --git a/ansible_builder/cli.py b/ansible_builder/cli.py index 4e23e884..eca4ea97 100644 --- a/ansible_builder/cli.py +++ b/ansible_builder/cli.py @@ -1,7 +1,6 @@ import argparse import logging import sys -import yaml import os import pkg_resources @@ -11,9 +10,8 @@ from .exceptions import DefinitionError from .main import AnsibleBuilder from .policies import PolicyChoices -from .introspect import process, simple_combine, base_collections_path -from .requirements import sanitize_requirements -from .utils import configure_logger, write_file +from ._target_scripts.introspect import create_introspect_parser, 
run_introspect +from .utils import configure_logger logger = logging.getLogger(__name__) @@ -38,27 +36,7 @@ def run(): sys.exit(1) elif args.action == 'introspect': - data = process(args.folder, user_pip=args.user_pip, user_bindep=args.user_bindep) - if args.sanitize: - logger.info('# Sanitized dependencies for %s', args.folder) - data_for_write = data - data['python'] = sanitize_requirements(data['python']) - data['system'] = simple_combine(data['system']) - else: - logger.info('# Dependency data for %s', args.folder) - data_for_write = data.copy() - data_for_write['python'] = simple_combine(data['python']) - data_for_write['system'] = simple_combine(data['system']) - - print('---') - print(yaml.dump(data, default_flow_style=False)) - - if args.write_pip and data.get('python'): - write_file(args.write_pip, data_for_write.get('python') + ['']) - if args.write_bindep and data.get('system'): - write_file(args.write_bindep, data_for_write.get('system') + ['']) - - sys.exit(0) + run_introspect(args, logger) logger.error("An error has occured.") sys.exit(1) @@ -142,6 +120,13 @@ def add_container_options(parser): help='GPG keyring for container image validation.', ) + build_command_parser.add_argument( + '--squash', + choices=['new', 'all', 'off'], + default='new', + help='Squash layers in the final image (choices: %(choices)s). Defaults to "%(default)s". (podman only)' + ) + for p in [create_command_parser, build_command_parser]: p.add_argument('-f', '--file', @@ -173,46 +158,7 @@ def add_container_options(parser): help='The number of signatures that must successfully verify collections from ' 'ansible-galaxy ~if there are any signatures provided~. See ansible-galaxy doc for more info.') - introspect_parser = parser.add_parser( - 'introspect', - help='Introspects collections in folder.', - description=( - 'Loops over collections in folder and returns data about dependencies. ' - 'This is used internally and exposed here for verification. 
' - 'This is targeted toward collection authors and maintainers.' - ) - ) - introspect_parser.add_argument('--sanitize', action='store_true', - help=('Sanitize and de-duplicate requirements. ' - 'This is normally done separately from the introspect script, but this ' - 'option is given to more accurately test collection content.')) - - introspect_parser.add_argument( - 'folder', default=base_collections_path, nargs='?', - help=( - 'Ansible collections path(s) to introspect. ' - 'This should have a folder named ansible_collections inside of it.' - ) - ) - # Combine user requirements and collection requirements into single file - # in the future, could look into passing multilple files to - # python-builder scripts to be fed multiple files as opposed to this - introspect_parser.add_argument( - '--user-pip', dest='user_pip', - help='An additional file to combine with collection pip requirements.' - ) - introspect_parser.add_argument( - '--user-bindep', dest='user_bindep', - help='An additional file to combine with collection bindep requirements.' - ) - introspect_parser.add_argument( - '--write-pip', dest='write_pip', - help='Write the combined pip requirements file to this location.' - ) - introspect_parser.add_argument( - '--write-bindep', dest='write_bindep', - help='Write the combined bindep requirements file to this location.' 
- ) + introspect_parser = create_introspect_parser(parser) for n in [create_command_parser, build_command_parser, introspect_parser]: diff --git a/ansible_builder/constants.py b/ansible_builder/constants.py index f3d0a5ab..c2681612 100644 --- a/ansible_builder/constants.py +++ b/ansible_builder/constants.py @@ -31,6 +31,11 @@ # Files that need to be moved into the build context, and their naming inside the context CONTEXT_FILES = { + # HACK: hacking in prototype other kinds of deps for dynamic builder + 'python_interpreter': '', + 'ansible_core': '', + 'ansible_runner': '', + 'galaxy': 'requirements.yml', 'python': 'requirements.txt', 'system': 'bindep.txt', diff --git a/ansible_builder/containerfile.py b/ansible_builder/containerfile.py new file mode 100644 index 00000000..76e8f63b --- /dev/null +++ b/ansible_builder/containerfile.py @@ -0,0 +1,396 @@ +import importlib.resources +import logging +import os + +from pathlib import Path + +from . import constants +from .user_definition import UserDefinition +from .utils import copy_directory, copy_file + + +logger = logging.getLogger(__name__) + + +class Containerfile: + newline_char = '\n' + + def __init__(self, definition: UserDefinition, + build_context=None, + container_runtime=None, + output_filename=None, + galaxy_keyring=None, + galaxy_required_valid_signature_count=None, + galaxy_ignore_signature_status_codes=()): + """ + :param str galaxy_keyring: GPG keyring file used by ansible-galaxy to opportunistically validate collection signatures. + :param str galaxy_required_valid_signature_count: Number of sigs (prepend + to disallow no sig) required for ansible-galaxy to accept collections. + :param str galaxy_ignore_signature_status_codes: GPG Status codes to ignore when validating galaxy collections. 
+ """ + + self.build_context = build_context + self.build_outputs_dir = os.path.join( + build_context, constants.user_content_subfolder) + self.definition = definition + if output_filename is None: + filename = constants.runtime_files[container_runtime] + else: + filename = output_filename + self.path = os.path.join(self.build_context, filename) + self.container_runtime = container_runtime + self.original_galaxy_keyring = galaxy_keyring + self.copied_galaxy_keyring = None + self.galaxy_required_valid_signature_count = galaxy_required_valid_signature_count + self.galaxy_ignore_signature_status_codes = galaxy_ignore_signature_status_codes + self.steps: list = [] + + def prepare(self): + """ + Prepares the steps for the run-time specific build file. + + Incrementally builds the `self.steps` attribute by extending it with the + info to eventually be written directly to the container definition file + via a separate call to the `Containerfile.write()` method. + """ + + # Build args all need to go at top of file to avoid errors + self._insert_global_args(include_values=True) + + ###################################################################### + # Zero stage: prep base image + ###################################################################### + + # 'base' (possibly customized) will be used by future build stages + self.steps.extend([ + "# Base build stage", + "FROM $EE_BASE_IMAGE as base", + "USER root", + ]) + + self._insert_global_args() + self._insert_custom_steps('prepend_base') + + if not self.definition.builder_image: + if self.definition.python_package_name: + self.steps.append('RUN $PKGMGR install $PYPKG -y && $PKGMGR clean all') + + # We should always make sure pip is available for later stages. 
+ self.steps.append('RUN $PYCMD -m ensurepip') + + if self.definition.ansible_ref_install_list: + self.steps.append('RUN $PYCMD -m pip install --no-cache-dir $ANSIBLE_INSTALL_REFS') + + self._create_folder_copy_files() + self._insert_custom_steps('append_base') + + ###################################################################### + # First stage (aka, galaxy): install roles/collections + # + # NOTE: This stage is skipped if there are no galaxy requirements. + ###################################################################### + + if self.definition.get_dep_abs_path('galaxy'): + self.steps.extend([ + "", + "# Galaxy build stage", + "FROM base as galaxy", + ]) + + self._insert_global_args() + self._insert_custom_steps('prepend_galaxy') + + # Run the check for the 'ansible-galaxy' executable. This will fail + # the build if the command is not found. + self.steps.append("RUN /output/scripts/check_galaxy") + + self._prepare_ansible_config_file() + self._prepare_build_context() + self._prepare_galaxy_install_steps() + self._insert_custom_steps('append_galaxy') + + ###################################################################### + # Second stage (aka, builder): assemble (pip installs, bindep run) + ###################################################################### + + if self.definition.builder_image: + # Note: A builder image can be specified only in V1 or V2 schema. 
+ image = "$EE_BUILDER_IMAGE" + else: + # dynamic builder, create from customized base + image = "base" + + self.steps.extend([ + "", + "# Builder build stage", + f"FROM {image} as builder", + ]) + + self._insert_global_args() + + if image == "base": + self.steps.append("RUN $PYCMD -m pip install --no-cache-dir bindep pyyaml requirements-parser") + + self._insert_custom_steps('prepend_builder') + self._prepare_galaxy_copy_steps() + self._prepare_introspect_assemble_steps() + self._insert_custom_steps('append_builder') + + ###################################################################### + # Final stage: package manager installs from bindep output + ###################################################################### + + self.steps.extend([ + "", + "# Final build stage", + "FROM base as final", + ]) + + self._insert_global_args() + self._insert_custom_steps('prepend_final') + + # Run the check for 'ansible' and 'ansible-runner' installations for + # any EE version 3 or above, unless explicitly skipped. + if self.definition.version >= 3 and not self.definition.options['skip_ansible_check']: + self.steps.append("RUN /output/scripts/check_ansible $PYCMD") + + self._prepare_galaxy_copy_steps() + self._prepare_system_runtime_deps_steps() + + # install init package if specified + # FUTURE: could move this into the pre-install wheel phase + if init_pip_pkg := self.definition.container_init.get('package_pip'): + self.steps.append(f"RUN $PYCMD -m pip install --no-cache-dir '{init_pip_pkg}'") + + self._insert_custom_steps('append_final') + self._prepare_label_steps() + self._prepare_entrypoint_steps() + + def write(self): + """ + Writes the steps (built via the `Containerfile.prepare()` method) for + the runtime-specific build file (Dockerfile or Containerfile) to the + context directory. 
+ """ + with open(self.path, 'w') as f: + for step in self.steps: + f.write(step + self.newline_char) + return True + + def _insert_global_args(self, include_values: bool = False): + """ + Insert Containerfile ARGs and, possibly, their values. + + An ARG with a None or empty value will not be included. + """ + + # ARGs will be output in the order listed below. + global_args = { + 'EE_BASE_IMAGE': self.definition.build_arg_defaults['EE_BASE_IMAGE'], + 'EE_BUILDER_IMAGE': self.definition.build_arg_defaults['EE_BUILDER_IMAGE'], + 'PYCMD': self.definition.python_path or '/usr/bin/python3', + 'PYPKG': self.definition.python_package_name, + 'ANSIBLE_GALAXY_CLI_COLLECTION_OPTS': self.definition.build_arg_defaults['ANSIBLE_GALAXY_CLI_COLLECTION_OPTS'], + 'ANSIBLE_GALAXY_CLI_ROLE_OPTS': self.definition.build_arg_defaults['ANSIBLE_GALAXY_CLI_ROLE_OPTS'], + 'ANSIBLE_INSTALL_REFS': self.definition.ansible_ref_install_list, + } + + if self.definition.version >= 3: + global_args['PKGMGR'] = self.definition.options['package_manager_path'] + + for arg, value in global_args.items(): + if include_values and value: + # quote the value in case it includes spaces + self.steps.append(f'ARG {arg}="{value}"') + elif value: + self.steps.append(f"ARG {arg}") + self.steps.append("") + + def _create_folder_copy_files(self): + """ + Creates the build context directory, and copies any potential context + files (python, galaxy, or bindep requirements) into it. 
+ """ + scripts_dir = str(Path(self.build_outputs_dir) / 'scripts') + os.makedirs(scripts_dir, exist_ok=True) + + for item, new_name in constants.CONTEXT_FILES.items(): + # HACK: new dynamic base/builder + if not new_name: + continue + + requirement_path = self.definition.get_dep_abs_path(item) + if requirement_path is None: + continue + dest = os.path.join( + self.build_context, constants.user_content_subfolder, new_name) + copy_file(requirement_path, dest) + + if self.original_galaxy_keyring: + copy_file(self.original_galaxy_keyring, os.path.join(self.build_outputs_dir, constants.default_keyring_name)) + + self._handle_additional_build_files() + + if self.definition.ansible_config: + copy_file( + self.definition.ansible_config, + os.path.join(self.build_outputs_dir, 'ansible.cfg') + ) + + # HACK: this sucks + scriptres = importlib.resources.files('ansible_builder._target_scripts') + for script in ('assemble', 'get-extras-packages', 'install-from-bindep', 'introspect.py', 'check_galaxy', 'check_ansible'): + with importlib.resources.as_file(scriptres / script) as script_path: + # FIXME: just use builtin copy? + copy_file(str(script_path), scripts_dir) + + # later steps depend on base image containing these scripts + context_dir = Path(self.build_outputs_dir).stem + self.steps.append(f'COPY {context_dir}/scripts/ /output/scripts/') + + def _handle_additional_build_files(self): + """ + Deal with any files the user wants added to the image build context. + + The 'src' value is either an absolute path, or a path relative to the + EE definition file. For example, 'src' can be a relative path like + "data_files/configs/*.cfg", but cannot be "/home/user/files/*.cfg", + the latter not being relative to the EE. 
+ """ + for entry in self.definition.additional_build_files: + src = Path(entry['src']) + dst = entry['dest'] + + # 'src' is either an absolute path or a path glob relative to the EE file + ee_file = Path(self.definition.filename) + if src.is_absolute(): + if not src.exists(): + logger.warning(f"User build file {src} does not exist.") + continue + src_files = [src] + elif not (src_files := list(ee_file.parent.glob(str(src)))): + logger.warning(f"No matches for '{src}' in additional_build_files.") + continue + + final_dst = Path(self.build_outputs_dir) / dst + logger.debug(f"Creating {final_dst}") + final_dst.mkdir(parents=True, exist_ok=True) + + for src_file in src_files: + if src_file.is_dir(): + copy_directory(src_file, final_dst) + else: + # Destination is the subdir under context plus the basename of the source + copy_location = final_dst / src_file.name + copy_file(str(src_file), str(copy_location)) + + def _prepare_ansible_config_file(self): + if self.definition.version != 1: + return + + ansible_config_file_path = self.definition.ansible_config + if ansible_config_file_path: + context_file_path = os.path.join( + constants.user_content_subfolder, 'ansible.cfg') + self.steps.extend([ + f"ADD {context_file_path} ~/.ansible.cfg", + "", + ]) + + def _insert_custom_steps(self, section: str): + additional_steps = self.definition.additional_build_steps + if additional_steps: + section_steps = additional_steps.get(section) + if section_steps: + if isinstance(section_steps, str): + lines = section_steps.strip().splitlines() + else: + lines = section_steps + self.steps.extend(lines) + + def _prepare_label_steps(self): + self.steps.extend([ + "LABEL ansible-execution-environment=true", + ]) + + def _prepare_build_context(self): + if any(self.definition.get_dep_abs_path(thing) for thing in ('galaxy', 'system', 'python')): + self.steps.extend([ + "ADD {0} /build".format(constants.user_content_subfolder), + "WORKDIR /build", + "", + ]) + + def 
_prepare_galaxy_install_steps(self): + env = "" + install_opts = f"-r {constants.CONTEXT_FILES['galaxy']} --collections-path \"{constants.base_collections_path}\"" + + if self.galaxy_ignore_signature_status_codes: + for code in self.galaxy_ignore_signature_status_codes: + install_opts += f" --ignore-signature-status-code {code}" + + if self.galaxy_required_valid_signature_count: + install_opts += f" --required-valid-signature-count {self.galaxy_required_valid_signature_count}" + + if self.original_galaxy_keyring: + install_opts += f" --keyring \"{constants.default_keyring_name}\"" + else: + # We have to use the environment variable to disable signature + # verification because older versions (<2.13) of ansible-galaxy do + # not support the --disable-gpg-verify option. We don't use ENV in + # the Containerfile since we need it only during the build and not + # in the final image. + env = "ANSIBLE_GALAXY_DISABLE_GPG_VERIFY=1 " + + self.steps.append( + f"RUN ansible-galaxy role install $ANSIBLE_GALAXY_CLI_ROLE_OPTS -r {constants.CONTEXT_FILES['galaxy']}" + f" --roles-path \"{constants.base_roles_path}\"", + ) + self.steps.append(f"RUN {env}ansible-galaxy collection install $ANSIBLE_GALAXY_CLI_COLLECTION_OPTS {install_opts}") + + def _prepare_introspect_assemble_steps(self): + # The introspect/assemble block is valid if there are any form of requirements + if any(self.definition.get_dep_abs_path(thing) for thing in ('galaxy', 'system', 'python')): + + introspect_cmd = "RUN $PYCMD /output/scripts/introspect.py introspect --sanitize" + + requirements_file_exists = os.path.exists(os.path.join( + self.build_outputs_dir, constants.CONTEXT_FILES['python'] + )) + + if requirements_file_exists: + relative_requirements_path = os.path.join(constants.user_content_subfolder, constants.CONTEXT_FILES['python']) + self.steps.append(f"ADD {relative_requirements_path} {constants.CONTEXT_FILES['python']}") + # WORKDIR is /build, so we use the (shorter) relative paths there + 
introspect_cmd += " --user-pip={0}".format(constants.CONTEXT_FILES['python']) + bindep_exists = os.path.exists(os.path.join(self.build_outputs_dir, constants.CONTEXT_FILES['system'])) + if bindep_exists: + relative_bindep_path = os.path.join(constants.user_content_subfolder, constants.CONTEXT_FILES['system']) + self.steps.append(f"ADD {relative_bindep_path} {constants.CONTEXT_FILES['system']}") + introspect_cmd += " --user-bindep={0}".format(constants.CONTEXT_FILES['system']) + + introspect_cmd += " --write-bindep=/tmp/src/bindep.txt --write-pip=/tmp/src/requirements.txt" + + self.steps.append(introspect_cmd) + self.steps.append("RUN /output/scripts/assemble") + + def _prepare_system_runtime_deps_steps(self): + self.steps.extend([ + "COPY --from=builder /output/ /output/", + "RUN /output/scripts/install-from-bindep && rm -rf /output/wheels", + ]) + + def _prepare_galaxy_copy_steps(self): + if self.definition.get_dep_abs_path('galaxy'): + self.steps.extend([ + "", + "COPY --from=galaxy {0} {0}".format( + os.path.dirname(constants.base_collections_path.rstrip('/')) # /usr/share/ansible + ), + "", + ]) + + def _prepare_entrypoint_steps(self): + if ep := self.definition.container_init.get('entrypoint'): + self.steps.append(f"ENTRYPOINT {ep}") + if cmd := self.definition.container_init.get('cmd'): + self.steps.append(f"CMD {cmd}") diff --git a/ansible_builder/ee_schema.py b/ansible_builder/ee_schema.py new file mode 100644 index 00000000..a0228ffc --- /dev/null +++ b/ansible_builder/ee_schema.py @@ -0,0 +1,392 @@ +from jsonschema import validate, SchemaError, ValidationError + +from ansible_builder.exceptions import DefinitionError + + +TYPE_StringOrListOfStrings = { + "anyOf": [ + {"type": "string"}, + { + "type": "array", + "items": { + "type": "string" + } + } + ] +} + + +############ +# Version 1 +############ + +schema_v1 = { + "type": "object", + "additionalProperties": False, + "properties": { + "version": { + "description": "The EE schema version number", + 
"type": "number", + }, + + "ansible_config": { + "type": "string", + }, + + "build_arg_defaults": { + "type": "object", + "additionalProperties": False, + "properties": { + "EE_BASE_IMAGE": { + "type": "string", + }, + "EE_BUILDER_IMAGE": { + "type": "string", + }, + "ANSIBLE_GALAXY_CLI_COLLECTION_OPTS": { + "type": "string", + }, + "ANSIBLE_GALAXY_CLI_ROLE_OPTS": { + "type": "string", + }, + }, + }, + + "dependencies": { + "description": "The dependency stuff", + "type": "object", + "additionalProperties": False, + "properties": { + "python": { + "description": "The python dependency file", + "type": "string", + }, + "galaxy": { + "description": "The Galaxy dependency file", + "type": "string", + }, + "system": { + "description": "The system dependency file", + "type": "string", + }, + }, + }, + + "additional_build_steps": { + "type": "object", + "additionalProperties": False, + "properties": { + "prepend": TYPE_StringOrListOfStrings, + "append": TYPE_StringOrListOfStrings, + }, + }, + }, +} + + +############ +# Version 2 +############ + +schema_v2 = { + "type": "object", + "additionalProperties": False, + "properties": { + "version": { + "description": "The EE schema version number", + "type": "number", + }, + + "ansible_config": { + "type": "string", + }, + + "build_arg_defaults": { + "type": "object", + "additionalProperties": False, + "properties": { + "ANSIBLE_GALAXY_CLI_COLLECTION_OPTS": { + "type": "string", + }, + "ANSIBLE_GALAXY_CLI_ROLE_OPTS": { + "type": "string", + }, + }, + }, + + "dependencies": { + "description": "The dependency stuff", + "type": "object", + "additionalProperties": False, + "properties": { + "python": { + "description": "The python dependency file", + "type": "string", + }, + "galaxy": { + "description": "The Galaxy dependency file", + "type": "string", + }, + "system": { + "description": "The system dependency file", + "type": "string", + }, + }, + }, + + "additional_build_steps": { + "type": "object", + "additionalProperties": 
False, + "properties": { + "prepend": TYPE_StringOrListOfStrings, + "append": TYPE_StringOrListOfStrings, + }, + }, + + "images": { + "type": "object", + "additionalProperties": False, + "properties": { + "base_image": { + "type": "object", + "properties": { + "name": { + "type": "string", + }, + "signature_original_name": { + "type": "string", + }, + }, + }, + "builder_image": { + "type": "object", + "properties": { + "name": { + "type": "string", + }, + "signature_original_name": { + "type": "string", + }, + }, + } + }, + }, + }, +} + + +############ +# Version 3 +############ + +schema_v3 = { + "type": "object", + "additionalProperties": False, + "properties": { + "version": { + "description": "The EE schema version number", + "type": "number", + }, + + "build_arg_defaults": { + "type": "object", + "additionalProperties": False, + "properties": { + "ANSIBLE_GALAXY_CLI_COLLECTION_OPTS": { + "type": "string", + }, + "ANSIBLE_GALAXY_CLI_ROLE_OPTS": { + "type": "string", + }, + }, + }, + + "dependencies": { + "description": "The dependency stuff", + "type": "object", + "additionalProperties": False, + "properties": { + "python": TYPE_StringOrListOfStrings, + "galaxy": { + "description": "The Galaxy dependency file", + "type": "string", + }, + "system": TYPE_StringOrListOfStrings, + "python_interpreter": { + "description": "Python package name and path", + "type": "object", + "additionalProperties": False, + "properties": { + "package_name": { + "description": "The python package to install", + "type": "string", + }, + "python_path": { + "description": "Path to the python interpreter", + "type": "string", + }, + }, + }, + "ansible_core": { + "description": "Ansible version for pip installation", + "type": "string", + }, + "ansible_runner": { + "description": "Ansible Runner version for pip installation", + "type": "string", + }, + }, + }, + + "images": { + "type": "object", + "additionalProperties": False, + "properties": { + "base_image": { + "type": "object", + 
"properties": { + "name": { + "type": "string", + }, + "signature_original_name": { + "type": "string", + }, + }, + }, + }, + }, + + "additional_build_steps": { + "type": "object", + "additionalProperties": False, + "properties": { + "prepend_base": TYPE_StringOrListOfStrings, + "append_base": TYPE_StringOrListOfStrings, + "prepend_galaxy": TYPE_StringOrListOfStrings, + "append_galaxy": TYPE_StringOrListOfStrings, + "prepend_builder": TYPE_StringOrListOfStrings, + "append_builder": TYPE_StringOrListOfStrings, + "prepend_final": TYPE_StringOrListOfStrings, + "append_final": TYPE_StringOrListOfStrings, + }, + }, + + "additional_build_files": { + "description": "Describes files to add to the build context", + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "src": { + "description": "File to add to build context", + "type": "string", + }, + "dest": { + "description": "Relative subdirectory under build context to place file", + "type": "string", + }, + }, + "required": ["src", "dest"], + }, + }, + + "options": { + "description": "Options that effect runtime behavior", + "type": "object", + "additionalProperties": False, + "properties": { + "skip_ansible_check": { + "description": "Disables the check for Ansible/Runner in final image", + "type": "boolean", + }, + "package_manager_path": { + "description": "Path to the system package manager to use", + "type": "string", + }, + "container_init": { + "description": "Customize container startup behavior", + "type": "object", + "additionalProperties": False, + "properties": { + "package_pip": { + "description": "package to install via pip for entrypoint support", + "type": "string", + }, + "entrypoint": { + "description": "literal value for ENTRYPOINT Containerfile directive", + "type": "string", + }, + "cmd": { + "description": "literal value for CMD Containerfile directive", + "type": "string", + }, + }, + }, + }, + }, + }, +} + + +def validate_schema(ee_def: dict): + 
schema_version = 1 + if 'version' in ee_def: + try: + schema_version = int(ee_def['version']) + except ValueError: + raise DefinitionError(f"Schema version not an integer: {ee_def['version']}") + + if schema_version not in (1, 2, 3): + raise DefinitionError(f"Unsupported schema version: {schema_version}") + + try: + if schema_version == 1: + validate(instance=ee_def, schema=schema_v1) + elif schema_version == 2: + validate(instance=ee_def, schema=schema_v2) + elif schema_version == 3: + validate(instance=ee_def, schema=schema_v3) + except (SchemaError, ValidationError) as e: + raise DefinitionError(msg=e.message, path=e.absolute_schema_path) + + _handle_aliasing(ee_def) + + if schema_version >= 3: + _handle_options_defaults(ee_def) + + +def _handle_aliasing(ee_def: dict): + """ + Upgrade EE keys into standard keys across schema versions. + + Some EE keys are renamed across schema versions. So that we don't need to + check schema version, or do some other hackery, in the builder code when + accessing the values, just do the key name upgrades/aliasing here. + """ + + if 'additional_build_steps' in ee_def: + # V1/V2 'prepend' == V3 'prepend_final' + if 'prepend' in ee_def['additional_build_steps']: + ee_def['additional_build_steps']['prepend_final'] = ee_def['additional_build_steps']['prepend'] + + # V1/V2 'append' == V3 'append_final' + if 'append' in ee_def['additional_build_steps']: + ee_def['additional_build_steps']['append_final'] = ee_def['additional_build_steps']['append'] + + +def _handle_options_defaults(ee_def: dict): + """ + JSONSchema can document a "default" value, but it isn't used for validation. + This method is used to set any default values for the "options" dictionary + properties. 
+ """ + options = ee_def.setdefault('options', {}) + + options.setdefault('skip_ansible_check', False) + options.setdefault('package_manager_path', '/usr/bin/dnf') + options.setdefault('container_init', { + 'package_pip': 'dumb-init==1.2.5', + 'entrypoint': '["dumb-init"]', + 'cmd': '["bash"]', + }) diff --git a/ansible_builder/exceptions.py b/ansible_builder/exceptions.py index d301c44f..9241c1fd 100644 --- a/ansible_builder/exceptions.py +++ b/ansible_builder/exceptions.py @@ -1,10 +1,15 @@ +from __future__ import annotations + import sys +from typing import Sequence + class DefinitionError(RuntimeError): # Eliminate the output of traceback before our custom error message prints out sys.tracebacklimit = 0 - def __init__(self, msg): + def __init__(self, msg: str, path: Sequence[str | int] | None = None): super(DefinitionError, self).__init__("%s" % msg) self.msg = msg + self.path = path diff --git a/ansible_builder/introspect.py b/ansible_builder/introspect.py deleted file mode 100644 index 5dc4e09b..00000000 --- a/ansible_builder/introspect.py +++ /dev/null @@ -1,202 +0,0 @@ -import os -import yaml - - -base_collections_path = '/usr/share/ansible/collections' -default_file = 'execution-environment.yml' - - -def line_is_empty(line): - return bool((not line.strip()) or line.startswith('#')) - - -def read_req_file(path): - """Provide some minimal error and display handling for file reading""" - if not os.path.exists(path): - print('Expected requirements file not present at: {0}'.format(os.path.abspath(path))) - with open(path, 'r') as f: - return f.read() - - -def pip_file_data(path): - pip_content = read_req_file(path) - - pip_lines = [] - for line in pip_content.split('\n'): - if line_is_empty(line): - continue - if line.startswith('-r') or line.startswith('--requirement'): - _, new_filename = line.split(None, 1) - new_path = os.path.join(os.path.dirname(path or '.'), new_filename) - pip_lines.extend(pip_file_data(new_path)) - else: - pip_lines.append(line) - - 
return pip_lines - - -def bindep_file_data(path): - sys_content = read_req_file(path) - - sys_lines = [] - for line in sys_content.split('\n'): - if line_is_empty(line): - continue - sys_lines.append(line) - - return sys_lines - - -def process_collection(path): - """Return a tuple of (python_dependencies, system_dependencies) for the - collection install path given. - Both items returned are a list of dependencies. - - :param str path: root directory of collection (this would contain galaxy.yml file) - """ - CD = CollectionDefinition(path) - - py_file = CD.get_dependency('python') - pip_lines = [] - if py_file: - pip_lines = pip_file_data(os.path.join(path, py_file)) - - sys_file = CD.get_dependency('system') - bindep_lines = [] - if sys_file: - bindep_lines = bindep_file_data(os.path.join(path, sys_file)) - - return (pip_lines, bindep_lines) - - -def process(data_dir=base_collections_path, user_pip=None, user_bindep=None): - paths = [] - path_root = os.path.join(data_dir, 'ansible_collections') - - # build a list of all the valid collection paths - if os.path.exists(path_root): - for namespace in sorted(os.listdir(path_root)): - if not os.path.isdir(os.path.join(path_root, namespace)): - continue - for name in sorted(os.listdir(os.path.join(path_root, namespace))): - collection_dir = os.path.join(path_root, namespace, name) - if not os.path.isdir(collection_dir): - continue - files_list = os.listdir(collection_dir) - if 'galaxy.yml' in files_list or 'MANIFEST.json' in files_list: - paths.append(collection_dir) - - # populate the requirements content - py_req = {} - sys_req = {} - for path in paths: - col_pip_lines, col_sys_lines = process_collection(path) - CD = CollectionDefinition(path) - namespace, name = CD.namespace_name() - key = '{}.{}'.format(namespace, name) - - if col_pip_lines: - py_req[key] = col_pip_lines - - if col_sys_lines: - sys_req[key] = col_sys_lines - - # add on entries from user files, if they are given - if user_pip: - col_pip_lines = 
pip_file_data(user_pip) - if col_pip_lines: - py_req['user'] = col_pip_lines - if user_bindep: - col_sys_lines = bindep_file_data(user_bindep) - if col_sys_lines: - sys_req['user'] = col_sys_lines - - return { - 'python': py_req, - 'system': sys_req - } - - -def has_content(candidate_file): - """Beyond checking that the candidate exists, this also assures - that the file has something other than whitespace, - which can cause errors when given to pip. - """ - if not os.path.exists(candidate_file): - return False - with open(candidate_file, 'r') as f: - content = f.read() - return bool(content.strip().strip('\n')) - - -class CollectionDefinition: - """This class represents the dependency metadata for a collection - should be replaced by logic to hit the Galaxy API if made available - """ - - def __init__(self, collection_path): - self.reference_path = collection_path - meta_file = os.path.join(collection_path, 'meta', default_file) - if os.path.exists(meta_file): - with open(meta_file, 'r') as f: - self.raw = yaml.safe_load(f) - else: - self.raw = {'version': 1, 'dependencies': {}} - # Automatically infer requirements for collection - for entry, filename in [('python', 'requirements.txt'), ('system', 'bindep.txt')]: - candidate_file = os.path.join(collection_path, filename) - if has_content(candidate_file): - self.raw['dependencies'][entry] = filename - - def target_dir(self): - namespace, name = self.namespace_name() - return os.path.join( - base_collections_path, 'ansible_collections', - namespace, name - ) - - def namespace_name(self): - "Returns 2-tuple of namespace and name" - path_parts = [p for p in self.reference_path.split(os.path.sep) if p] - return tuple(path_parts[-2:]) - - def get_dependency(self, entry): - """A collection is only allowed to reference a file by a relative path - which is relative to the collection root - """ - req_file = self.raw.get('dependencies', {}).get(entry) - if req_file is None: - return None - elif os.path.isabs(req_file): - 
raise RuntimeError( - 'Collections must specify relative paths for requirements files. ' - 'The file {0} specified by {1} violates this.'.format( - req_file, self.reference_path - ) - ) - - return req_file - - -def simple_combine(reqs): - """Given a dictionary of requirement lines keyed off collections, - return a list with the most basic of de-duplication logic, - and comments indicating the sources based off the collection keys - """ - consolidated = [] - fancy_lines = [] - for collection, lines in reqs.items(): - for line in lines: - if line_is_empty(line): - continue - - base_line = line.split('#')[0].strip() - if base_line in consolidated: - i = consolidated.index(base_line) - fancy_lines[i] += ', {}'.format(collection) - else: - fancy_line = base_line + ' # from collection {}'.format(collection) - consolidated.append(base_line) - fancy_lines.append(fancy_line) - - return fancy_lines diff --git a/ansible_builder/main.py b/ansible_builder/main.py index 0b6e518e..1045f1e2 100644 --- a/ansible_builder/main.py +++ b/ansible_builder/main.py @@ -2,12 +2,10 @@ import os from . import constants +from .containerfile import Containerfile from .policies import PolicyChoices, IgnoreAll, ExactReference -from .steps import ( - AdditionalBuildSteps, BuildContextSteps, GalaxyInstallSteps, GalaxyCopySteps, AnsibleConfigSteps -) from .user_definition import UserDefinition -from .utils import run_command, copy_file +from .utils import run_command logger = logging.getLogger(__name__) @@ -30,6 +28,7 @@ def __init__(self, galaxy_ignore_signature_status_codes=(), container_policy=None, container_keyring=None, + squash=None, ): """ :param str galaxy_keyring: GPG keyring file used by ansible-galaxy to opportunistically validate collection signatures. 
@@ -65,6 +64,7 @@ def __init__(self, galaxy_ignore_signature_status_codes=galaxy_ignore_signature_status_codes) self.verbosity = verbosity self.container_policy, self.container_keyring = self._handle_image_validation_opts(container_policy, container_keyring) + self.squash = squash def _handle_image_validation_opts(self, policy, keyring): """ @@ -87,7 +87,7 @@ def _handle_image_validation_opts(self, policy, keyring): resolved_keyring = None if policy is not None: - if self.version != "2": + if self.version != 2: raise ValueError(f'--container-policy not valid with version {self.version} format') # Require podman runtime @@ -132,31 +132,7 @@ def ansible_config(self): def create(self): logger.debug('Ansible Builder is generating your execution environment build context.') - return self.write_containerfile() - - def write_containerfile(self): - # File preparation - self.containerfile.create_folder_copy_files() - - # First stage, galaxy - self.containerfile.prepare_galaxy_stage_steps() - self.containerfile.prepare_ansible_config_file() - self.containerfile.prepare_build_context() - self.containerfile.prepare_galaxy_install_steps() - - # Second stage, builder - self.containerfile.prepare_build_stage_steps() - self.containerfile.prepare_galaxy_copy_steps() - self.containerfile.prepare_introspect_assemble_steps() - - # Second stage - self.containerfile.prepare_final_stage_steps() - self.containerfile.prepare_prepended_steps() - self.containerfile.prepare_galaxy_copy_steps() - self.containerfile.prepare_system_runtime_deps_steps() - self.containerfile.prepare_appended_steps() - self.containerfile.prepare_label_steps() - logger.debug('Rewriting Containerfile to capture collection requirements') + self.containerfile.prepare() return self.containerfile.write() @property @@ -188,6 +164,13 @@ def build_command(self): if self.no_cache: command.append('--no-cache') + # Image layer squashing works only with podman. Still experimental for docker. 
+ if self.container_runtime == 'podman' and self.squash and self.squash != 'off': + if self.squash == 'new': + command.append('--squash') + elif self.squash == 'all': + command.append('--squash-all') + if self.container_policy: logger.debug('Container policy is %s', PolicyChoices(self.container_policy).value) @@ -220,202 +203,10 @@ def build_command(self): return command def build(self): + self.create() logger.debug('Ansible Builder is building your execution environment image. Tags: %s', ", ".join(self.tags)) - self.write_containerfile() run_command(self.build_command) if self.prune_images: logger.debug('Removing all dangling images') run_command(self.prune_image_command) return True - - -class Containerfile: - newline_char = '\n' - - def __init__(self, definition, - build_context=None, - container_runtime=None, - output_filename=None, - galaxy_keyring=None, - galaxy_required_valid_signature_count=None, - galaxy_ignore_signature_status_codes=()): - """ - :param str galaxy_keyring: GPG keyring file used by ansible-galaxy to opportunistically validate collection signatures. - :param str galaxy_required_valid_signature_count: Number of sigs (prepend + to disallow no sig) required for ansible-galaxy to accept collections. - :param str galaxy_ignore_signature_status_codes: GPG Status codes to ignore when validating galaxy collections. 
- """ - - self.build_context = build_context - self.build_outputs_dir = os.path.join( - build_context, constants.user_content_subfolder) - self.definition = definition - if output_filename is None: - filename = constants.runtime_files[container_runtime] - else: - filename = output_filename - self.path = os.path.join(self.build_context, filename) - self.container_runtime = container_runtime - self.original_galaxy_keyring = galaxy_keyring - self.copied_galaxy_keyring = None - self.galaxy_required_valid_signature_count = galaxy_required_valid_signature_count - self.galaxy_ignore_signature_status_codes = galaxy_ignore_signature_status_codes - - # Build args all need to go at top of file to avoid errors - self.steps = [ - "ARG EE_BASE_IMAGE={}".format( - self.definition.build_arg_defaults['EE_BASE_IMAGE'] - ), - "ARG EE_BUILDER_IMAGE={}".format( - self.definition.build_arg_defaults['EE_BUILDER_IMAGE'] - ), - ] - - def create_folder_copy_files(self): - """Creates the build context file for this Containerfile - moves files from the definition into the folder - """ - os.makedirs(self.build_outputs_dir, exist_ok=True) - - for item, new_name in constants.CONTEXT_FILES.items(): - requirement_path = self.definition.get_dep_abs_path(item) - if requirement_path is None: - continue - dest = os.path.join( - self.build_context, constants.user_content_subfolder, new_name) - copy_file(requirement_path, dest) - - if self.original_galaxy_keyring: - self.copied_galaxy_keyring = constants.default_keyring_name - copy_file(self.original_galaxy_keyring, os.path.join(self.build_outputs_dir, self.copied_galaxy_keyring)) - - if self.definition.ansible_config: - copy_file( - self.definition.ansible_config, - os.path.join(self.build_outputs_dir, 'ansible.cfg') - ) - - def prepare_ansible_config_file(self): - ansible_config_file_path = self.definition.ansible_config - if ansible_config_file_path: - context_file_path = os.path.join( - constants.user_content_subfolder, 'ansible.cfg') - return 
self.steps.extend(AnsibleConfigSteps(context_file_path)) - - def prepare_prepended_steps(self): - additional_prepend_steps = self.definition.get_additional_commands() - if additional_prepend_steps: - prepended_steps = additional_prepend_steps.get('prepend') - if prepended_steps: - return self.steps.extend(AdditionalBuildSteps(prepended_steps)) - - return False - - def prepare_appended_steps(self): - additional_append_steps = self.definition.get_additional_commands() - if additional_append_steps: - appended_steps = additional_append_steps.get('append') - if appended_steps: - return self.steps.extend(AdditionalBuildSteps(appended_steps)) - - return False - - def prepare_label_steps(self): - self.steps.extend([ - "LABEL ansible-execution-environment=true", - ]) - - return self.steps - - def prepare_build_context(self): - if any(self.definition.get_dep_abs_path(thing) for thing in ('galaxy', 'system', 'python')): - self.steps.extend(BuildContextSteps()) - return self.steps - - def prepare_galaxy_install_steps(self): - if self.definition.get_dep_abs_path('galaxy'): - self.steps.extend(GalaxyInstallSteps(constants.CONTEXT_FILES['galaxy'], - self.copied_galaxy_keyring, - self.galaxy_ignore_signature_status_codes, - self.galaxy_required_valid_signature_count)) - return self.steps - - def prepare_introspect_assemble_steps(self): - # The introspect/assemble block is valid if there are any form of requirements - if any(self.definition.get_dep_abs_path(thing) for thing in ('galaxy', 'system', 'python')): - - introspect_cmd = "RUN ansible-builder introspect --sanitize" - - requirements_file_exists = os.path.exists(os.path.join( - self.build_outputs_dir, constants.CONTEXT_FILES['python'] - )) - if requirements_file_exists: - relative_requirements_path = os.path.join(constants.user_content_subfolder, constants.CONTEXT_FILES['python']) - self.steps.append(f"ADD {relative_requirements_path} {constants.CONTEXT_FILES['python']}") - # WORKDIR is /build, so we use the (shorter) 
relative paths there - introspect_cmd += " --user-pip={0}".format(constants.CONTEXT_FILES['python']) - bindep_exists = os.path.exists(os.path.join(self.build_outputs_dir, constants.CONTEXT_FILES['system'])) - if bindep_exists: - relative_bindep_path = os.path.join(constants.user_content_subfolder, constants.CONTEXT_FILES['system']) - self.steps.append(f"ADD {relative_bindep_path} {constants.CONTEXT_FILES['system']}") - introspect_cmd += " --user-bindep={0}".format(constants.CONTEXT_FILES['system']) - - introspect_cmd += " --write-bindep=/tmp/src/bindep.txt --write-pip=/tmp/src/requirements.txt" - - self.steps.append(introspect_cmd) - self.steps.append("RUN assemble") - - return self.steps - - def prepare_system_runtime_deps_steps(self): - self.steps.extend([ - "COPY --from=builder /output/ /output/", - "RUN /output/install-from-bindep && rm -rf /output/wheels", - ]) - - return self.steps - - def prepare_galaxy_stage_steps(self): - self.steps.extend([ - "", - "FROM $EE_BASE_IMAGE as galaxy", - "ARG ANSIBLE_GALAXY_CLI_COLLECTION_OPTS={}".format( - self.definition.build_arg_defaults['ANSIBLE_GALAXY_CLI_COLLECTION_OPTS'] - ), - "ARG ANSIBLE_GALAXY_CLI_ROLE_OPTS={}".format( - self.definition.build_arg_defaults['ANSIBLE_GALAXY_CLI_ROLE_OPTS'] - ), - "USER root", - "" - ]) - - return self.steps - - def prepare_build_stage_steps(self): - self.steps.extend([ - "", - "FROM $EE_BUILDER_IMAGE as builder" - "", - ]) - - return self.steps - - def prepare_final_stage_steps(self): - self.steps.extend([ - "", - "FROM $EE_BASE_IMAGE", - "USER root" - "", - ]) - return self.steps - - def prepare_galaxy_copy_steps(self): - if self.definition.get_dep_abs_path('galaxy'): - self.steps.extend(GalaxyCopySteps()) - return self.steps - - def write(self): - with open(self.path, 'w') as f: - for step in self.steps: - f.write(step + self.newline_char) - - return True diff --git a/ansible_builder/steps.py b/ansible_builder/steps.py deleted file mode 100644 index c7d75c33..00000000 --- 
a/ansible_builder/steps.py +++ /dev/null @@ -1,101 +0,0 @@ -import sys -import os - -from . import constants -from .exceptions import DefinitionError - - -class Steps: - def __iter__(self): - return iter(self.steps) - - -class AdditionalBuildSteps(Steps): - def __init__(self, additional_steps): - """Allows for additional prepended / appended build steps to be - in the Containerfile or Dockerfile. - """ - self.steps = [] - if isinstance(additional_steps, str): - lines = additional_steps.strip().splitlines() - elif isinstance(additional_steps, list): - lines = additional_steps - else: - raise DefinitionError( - """ - Error: Unknown type found for additional_build_steps; must be list or multi-line string. - """ - ) - sys.exit(1) - self.steps.extend(lines) - - def __iter__(self): - return iter(self.steps) - - -class BuildContextSteps(Steps): - def __init__(self): - self.steps = [ - "ADD {0} /build".format(constants.user_content_subfolder), - "WORKDIR /build", - "", - ] - - -class GalaxyInstallSteps(Steps): - def __init__(self, requirements_naming, galaxy_keyring, galaxy_ignore_signature_status_codes, galaxy_required_valid_signature_count): - """Assumes given requirements file name and keyring has been placed in the build context. - - :param str galaxy_keyring: GPG keyring file used by ansible-galaxy to opportunistically validate collection signatures. - :param str galaxy_required_valid_signature_count: Number of sigs (prepend + to disallow no sig) required for ansible-galaxy to accept collections. - :param str galaxy_ignore_signature_status_codes: GPG Status codes to ignore when validating galaxy collections. 
- """ - - env = "" - install_opts = f"-r {requirements_naming} --collections-path \"{constants.base_collections_path}\"" - - if galaxy_ignore_signature_status_codes: - for code in galaxy_ignore_signature_status_codes: - install_opts += f" --ignore-signature-status-code {code}" - - if galaxy_required_valid_signature_count: - install_opts += f" --required-valid-signature-count {galaxy_required_valid_signature_count}" - - if galaxy_keyring: - install_opts += f" --keyring \"{galaxy_keyring}\"" - else: - # We have to use the environment variable to disable signature - # verification because older versions (<2.13) of ansible-galaxy do - # not support the --disable-gpg-verify option. We don't use ENV in - # the Containerfile since we need it only during the build and not - # in the final image. - env = "ANSIBLE_GALAXY_DISABLE_GPG_VERIFY=1 " - - self.steps = [ - f"RUN ansible-galaxy role install $ANSIBLE_GALAXY_CLI_ROLE_OPTS -r {requirements_naming} --roles-path \"{constants.base_roles_path}\"", - f"RUN {env}ansible-galaxy collection install $ANSIBLE_GALAXY_CLI_COLLECTION_OPTS {install_opts}", - ] - - -class GalaxyCopySteps(Steps): - def __init__(self): - """Assumes given requirements file name has been placed in the build context - """ - self.steps = [] - self.steps.extend([ - "", - "COPY --from=galaxy {0} {0}".format( - os.path.dirname(constants.base_collections_path.rstrip('/')) # /usr/share/ansible - ), - "", - ]) - - -class AnsibleConfigSteps(Steps): - def __init__(self, context_file): - """Copies a user's ansible.cfg file for accessing Galaxy server""" - self.steps = [] - self.steps.extend([ - f"ADD {context_file} ~/.ansible.cfg", - "", - ]) diff --git a/ansible_builder/user_definition.py b/ansible_builder/user_definition.py index 211ebd5c..483a1f80 100644 --- a/ansible_builder/user_definition.py +++ b/ansible_builder/user_definition.py @@ -1,22 +1,18 @@ import os import textwrap +import tempfile import yaml +from pathlib import Path +from typing import Callable + 
from . import constants from .exceptions import DefinitionError +from .ee_schema import validate_schema -ALLOWED_KEYS_V1 = [ - 'version', - 'build_arg_defaults', - 'dependencies', - 'ansible_config', - 'additional_build_steps', -] - -ALLOWED_KEYS_V2 = [ - 'images', -] +# HACK: manage lifetimes more carefully +_tempfiles: list[Callable] = [] class ImageDescription: @@ -97,6 +93,10 @@ def __init__(self, filename): # Set default values for the build arguments. User supplied values # are set later during validation. self.build_arg_defaults = constants.build_arg_defaults.copy() + if self.version > 2: + # v3 and higher no longer supports a builder image so make + # sure this value is cleared of the default value. + self.build_arg_defaults['EE_BUILDER_IMAGE'] = None # Attributes used for creating podman container policies. These will be None # if no 'images' section is present in the EE, or an ImageDescription object otherwise. @@ -106,12 +106,12 @@ def __init__(self, filename): @property def version(self): """ - Version of the EE file. + Integer version of the EE file. If no version is specified, assume version 1 (for backward compat). """ version = self.raw.get('version', 1) - return str(version) + return version @property def ansible_config(self): @@ -121,11 +121,43 @@ def ansible_config(self): return None return str(ansible_config) - def get_additional_commands(self): + @property + def additional_build_steps(self): """Gets additional commands from the exec env file, if any are specified. 
""" - commands = self.raw.get('additional_build_steps') - return commands + return self.raw.get('additional_build_steps') + + @property + def python_package_name(self): + return self.raw.get('dependencies', {}).get('python_interpreter', {}).get('package_name', None) + + @property + def python_path(self): + return self.raw.get('dependencies', {}).get('python_interpreter', {}).get('python_path', None) + + @property + def ansible_core_ref(self): + return self.raw.get('dependencies', {}).get('ansible_core', None) + + @property + def ansible_runner_ref(self): + return self.raw.get('dependencies', {}).get('ansible_runner', None) + + @property + def ansible_ref_install_list(self): + return ' '.join([r for r in (self.ansible_core_ref, self.ansible_runner_ref) if r]) or None + + @property + def additional_build_files(self): + return self.raw.get('additional_build_files', []) + + @property + def container_init(self): + return self.raw.get('options', {}).get('container_init', {}) + + @property + def options(self): + return self.raw.get('options', {}) def get_dep_abs_path(self, entry): """Unique to the user EE definition, files can be referenced by either @@ -137,93 +169,50 @@ def get_dep_abs_path(self, entry): if not req_file: return None + # HACK: jamming in prototype support for inline deps listing, tempfile handling is ass + if (is_list := isinstance(req_file, list)) or (isinstance(req_file, str) and '\n' in req_file): + tf = tempfile.NamedTemporaryFile('w') + if is_list: + tf.write('\n'.join(req_file)) + else: + tf.write(req_file) + _tempfiles.append(tf) + tf.flush() # don't close, it'll clean up on GC + req_file = tf.name + if not isinstance(req_file, str): + return None + if os.path.isabs(req_file): return req_file return os.path.join(self.reference_path, req_file) - def _validate_root_keys(self): - """ - Identify any invalid top-level keys in the execution environment file. - - :raises: DefinitionError exception if any invalid keys are identified. 
+ def _validate_additional_build_files(self): """ - def_file_dict = self.raw - yaml_keys = set(def_file_dict.keys()) - - valid_keys = set(ALLOWED_KEYS_V1) - if self.version == '2': - valid_keys = valid_keys.union(set(ALLOWED_KEYS_V2)) - - invalid_keys = yaml_keys - valid_keys + Check that entries in additional_build_files look correct. - if invalid_keys: - raise DefinitionError(textwrap.dedent( - f""" - Error: Unknown yaml key(s), {invalid_keys}, found in the definition file.\n - Allowed options are: - {valid_keys} - """) - ) - - def _validate_v2(self): - """ - Validate all execution environment file, version 2, keys. + The 'dest' values are checked for the correct format. Since 'src' can + be a file glob or an absolute or relative path, it is not checked. :raises: DefinitionError exception if any errors are found. """ + for entry in self.additional_build_files: + dest = Path(entry['dest']) + if dest.is_absolute() or '..' in dest.parts: + raise DefinitionError(f"'dest' must not be an absolute path or contain '..': {dest}") - if self.version == "1": - return - - images = self.raw.get('images', {}) - - # The base and builder images MUST be defined in the 'images' section only. - bad = self.raw.get('build_arg_defaults') - if bad: - if 'EE_BASE_IMAGE' in bad or 'EE_BUILDER_IMAGE' in bad: - raise DefinitionError("Error: Version 2 does not allow defining EE_BASE_IMAGE or EE_BUILDER_IMAGE in 'build_arg_defaults'") - - if images: - self.base_image = ImageDescription(images, 'base_image') - self.builder_image = ImageDescription(images, 'builder_image') - - # Must set these values so that Containerfile uses the proper images - if self.base_image.name: - self.build_arg_defaults['EE_BASE_IMAGE'] = self.base_image.name - - if self.builder_image.name: - self.build_arg_defaults['EE_BUILDER_IMAGE'] = self.builder_image.name - - def _validate_v1(self): + def validate(self): """ - Validate all execution environment file, version 1, keys. 
+ Check that all specified keys in the definition file are valid. :raises: DefinitionError exception if any errors are found. """ - - if self.raw.get('dependencies') is not None: - if not isinstance(self.raw.get('dependencies'), dict): - raise DefinitionError(textwrap.dedent( - f""" - Error: Unknown type {type(self.raw.get('dependencies'))} found for dependencies, must be a dict.\n - Allowed options are: - {list(constants.CONTEXT_FILES.keys())} - """) - ) - - dependencies_keys = set(self.raw.get('dependencies')) - invalid_dependencies_keys = dependencies_keys - set(constants.CONTEXT_FILES.keys()) - if invalid_dependencies_keys: - raise DefinitionError(textwrap.dedent( - f""" - Error: Unknown yaml key(s), {invalid_dependencies_keys}, found in dependencies.\n - Allowed options are: - {list(constants.CONTEXT_FILES.keys())} - """) - ) + validate_schema(self.raw) for item in constants.CONTEXT_FILES: + # HACK: non-file deps for dynamic base/builder + if not constants.CONTEXT_FILES[item]: + continue requirement_path = self.get_dep_abs_path(item) if requirement_path: if not os.path.exists(requirement_path): @@ -232,54 +221,19 @@ def _validate_v1(self): # Validate and set any user-specified build arguments build_arg_defaults = self.raw.get('build_arg_defaults') if build_arg_defaults: - if not isinstance(build_arg_defaults, dict): - raise DefinitionError( - f"Error: Unknown type {type(build_arg_defaults)} found for build_arg_defaults; " - f"must be a dict." - ) - unexpected_keys = set(build_arg_defaults) - set(constants.build_arg_defaults) - if unexpected_keys: - raise DefinitionError( - f"Keys {unexpected_keys} are not allowed in 'build_arg_defaults'." - ) for key, user_value in build_arg_defaults.items(): - if user_value and not isinstance(user_value, str): - raise DefinitionError( - f"Expected build_arg_defaults.{key} to be a string; " - f"Found a {type(user_value)} instead." 
- ) self.build_arg_defaults[key] = user_value - additional_cmds = self.get_additional_commands() - if additional_cmds: - if not isinstance(additional_cmds, dict): - raise DefinitionError(textwrap.dedent(""" - Expected 'additional_build_steps' in the provided definition file to be a dictionary - with keys 'prepend' and/or 'append'; found a {0} instead. - """).format(type(additional_cmds).__name__)) - - expected_keys = frozenset(('append', 'prepend')) - unexpected_keys = set(additional_cmds) - expected_keys - if unexpected_keys: - raise DefinitionError( - f"Keys {*unexpected_keys,} are not allowed in 'additional_build_steps'." - ) - - ansible_config_path = self.raw.get('ansible_config') - if ansible_config_path: - if not isinstance(ansible_config_path, str): - raise DefinitionError(textwrap.dedent(f""" - Expected 'ansible_config' in the provided definition file to - be a string; found a {type(ansible_config_path).__name__} instead. - """)) + if self.version > 1: + images = self.raw.get('images', {}) + if images: + self.base_image = ImageDescription(images, 'base_image') - def validate(self): - """ - Check that all specified keys in the definition file are valid. - - :raises: DefinitionError exception if any errors are found. 
- """ + # Must set these values so that Containerfile uses the proper images + if self.base_image.name: + self.build_arg_defaults['EE_BASE_IMAGE'] = self.base_image.name + if 'builder_image' in images: + self.builder_image = ImageDescription(images, 'builder_image') + self.build_arg_defaults['EE_BUILDER_IMAGE'] = self.builder_image.name - self._validate_root_keys() - self._validate_v1() - self._validate_v2() + self._validate_additional_build_files() diff --git a/ansible_builder/utils.py b/ansible_builder/utils.py index 1173600c..5234d40b 100644 --- a/ansible_builder/utils.py +++ b/ansible_builder/utils.py @@ -5,7 +5,9 @@ import shutil import subprocess import sys + from collections import deque +from pathlib import Path from .colors import MessageColors from . import constants @@ -138,6 +140,29 @@ def write_file(filename: str, lines: list) -> bool: return True +def copy_directory(source_dir: Path, dest: Path): + """ + Recursively copy a source directory to a path in the context directory. + + In order to not corrupt the build context cache, if it should exist, we + attempt to copy files within the source directory to the context directory + if necessary by utilizing copy_file() on each file, rather than a blind + recursive copy. 
+ """ + + if not source_dir.is_dir(): + raise Exception(f"Expected a directory at '{source_dir}'") + + for child in source_dir.iterdir(): + copy_location = dest / child.name + if child.is_dir(): + # a subdir of our build destination directory + copy_location.mkdir(exist_ok=True) + copy_directory(child, copy_location) + else: + copy_file(str(child), str(copy_location)) + + def copy_file(source: str, dest: str) -> bool: should_copy = False diff --git a/demo/v3_demo/execution-environment.yml b/demo/v3_demo/execution-environment.yml new file mode 100644 index 00000000..00e052c7 --- /dev/null +++ b/demo/v3_demo/execution-environment.yml @@ -0,0 +1,37 @@ +--- +version: 3 + +images: + base_image: + name: quay.io/centos/centos:stream8 # vanilla image! + # name: quay.io/centos/centos:stream9 # vanilla image! + # name: registry.fedoraproject.org/fedora:36 # vanilla image! + # name: registry.access.redhat.com/ubi9/ubi:latest # vanilla image! + +# no longer required, we do the needful inline, but should still work if someone sets it +# builder_image: +# name: quay.io/centos/centos:stream9 + +dependencies: + python_interpreter: + package_name: python39 + python_path: /usr/bin/python3.9 + + ansible_core: https://github.com/ansible/ansible/archive/refs/tags/v2.13.2.tar.gz # install from a GH ref tarball + + ansible_runner: ansible-runner==2.2.1 # install from PyPI + + # FIXME: inline splat-as-string sucks, make separate keys for collections and roles + galaxy: | + collections: + - community.windows # depends on ansible.windows + - ansible.utils # injects lots of system and Python deps + # - ansible.netcommon # breaks on ubi9, shouldn't have python3-Cython as an install dep! 
+ + python: + - pywinrm + + system: + - iputils [platform:rpm] + +# FIXME: add more hook points for dynamic base/builder step injection, eg, manual builder build-time package installation diff --git a/docs/definition.rst b/docs/definition.rst index ec0a4608..6bde99a6 100644 --- a/docs/definition.rst +++ b/docs/definition.rst @@ -4,189 +4,244 @@ Execution Environment Definition The execution environment (EE) definition file supports multiple versions. * Version 1: Supported by all ``ansible-builder`` versions. - * Version 2: Supported by ``ansible-builder`` versions ``1.2`` and higher. - -:ref:`Version 2 ` adds the capability to optionally use and verify -signed container images. This feature is only supported with the ``podman`` -container runtime. + * Version 2: Supported by ``ansible-builder`` versions ``1.2`` and later. + * Version 3: Supported by ``ansible-builder`` versions after ``1.2``. If the EE file does not specify a version, version 1 will be assumed. -.. _version-1: +.. note:: + + This version of the documentation discusses only the latest format version. + For further details on older formats, reference previous versions of the + documentation. -Version 1 Format +Version 3 Format ---------------- -An example version 1 execution environment definition schema is as follows: +This version of the EE definition file offers substantially more configurability +and functionality over previous versions. + +Below is an example version 3 EE file: .. 
code:: yaml --- - version: 1 + version: 3 build_arg_defaults: - EE_BASE_IMAGE: 'quay.io/ansible/ansible-runner:latest' - - ansible_config: 'ansible.cfg' + ANSIBLE_GALAXY_CLI_COLLECTION_OPTS: '--pre' dependencies: galaxy: requirements.yml - python: requirements.txt + python: + - six + - psutil system: bindep.txt + images: + base_image: + name: registry.redhat.io/ansible-automation-platform-21/ee-minimal-rhel8:latest + + additional_build_files: + - src: files/ansible.cfg + dest: configs + additional_build_steps: - prepend: | + prepend_galaxy: + - ADD _build/configs/ansible.cfg ~/.ansible.cfg + + prepend_final: | RUN whoami RUN cat /etc/os-release - append: + append_final: - RUN echo This is a post-install command! - RUN ls -la /etc +Configuration +^^^^^^^^^^^^^ -The following keys are supported in this version of the EE file: +Below are listed the configuration YAML keys that you may use in the v3 +format. -version -^^^^^^^ +additional_build_files +********************** -This integer value defines the version of the EE file. If not specified, the -default of ``1`` will be used. +This section allows you to add any file to the build context directory. These can +then be referenced or copied by `additional_build_steps` during any build stage. +The format is a list of dictionary values, each with a ``src`` and ``dest`` key and value. -build_arg_defaults -^^^^^^^^^^^^^^^^^^ +Each list item must be a dictionary containing the following (non-optional) keys: -Default values for build args can be specified in the definition file in -the ``build_arg_defaults`` section as a dictionary. This is an alternative -to using the ``--build-arg`` CLI flag. + ``src`` + Specifies the source file(s) to copy into the build context directory. This + may either be an absolute path (e.g., ``/home/user/.ansible.cfg``), + or a path that is relative to the execution environment file. Relative paths may be + a glob expression matching one or more files (e.g. ``files/*.cfg``). 
Note + that an absolute path may *not* include a regular expression. If ``src`` is + a directory, the entire contents of that directory are copied to ``dest``. -Build args used by ``ansible-builder`` are the following: + ``dest`` + Specifies a subdirectory path underneath the ``_build`` subdirectory of the + build context directory that should contain the source file(s) (e.g., ``files/configs``). + This may not be an absolute path or contain ``..`` within the path. This directory + will be created for you if it does not exist. -``ANSIBLE_GALAXY_CLI_COLLECTION_OPTS`` - This allows the user to pass the '--pre' flag (or others) to enable the installation of pre-releases collections. +additional_build_steps +********************** -``ANSIBLE_GALAXY_CLI_ROLE_OPTS`` - This allows the user to pass the flags to the Role installation. +This section enables you to specify custom build commands for any build phase. +These commands will be inserted directly into the instruction file for the +container runtime (e.g., `Containerfile` or `Dockerfile`). They will need to +conform to any rules required for the runtime system. -``EE_BASE_IMAGE`` - This string value specifies the parent image for the execution environment. +Below are the valid keys for this section. Each supports either a multi-line +string, or a list of strings. -``EE_BUILDER_IMAGE`` - This string value specifies the image used for compiling type tasks. + ``prepend_base`` + Commands to insert before building of the base image. -Values given inside of ``build_arg_defaults`` will be hard-coded into the -Containerfile, so they will persist if ``podman build`` is called manually. + ``append_base`` + Commands to insert after building of the base image. -If the same variable is specified in the CLI ``--build-arg`` flag, -the CLI value will take higher precedence. + ``prepend_galaxy`` + Commands to insert before building of the galaxy image. 
-ansible_config -^^^^^^^^^^^^^^ + ``append_galaxy`` + Commands to insert after building of the galaxy image. -When using an ``ansible.cfg`` file to pass a token and other settings for a -private account to an Automation Hub server, listing the config file path here -(as a string) will enable it to be included as a build argument in the initial -phase of the build. + ``prepend_builder`` + Commands to insert before building of the builder image. -dependencies -^^^^^^^^^^^^ + ``append_builder`` + Commands to insert after building of the builder image. -This section is a dictionary value that is used to define the Ansible Galaxy, -Python, and system dependencies that must be installed into the final container. -Valid keys for this section are: + ``prepend_final`` + Commands to insert before building of the final image. + + ``append_final`` + Commands to insert after building of the final image. -``galaxy`` - This string value is the path to a file containing the Ansible Galaxy - dependencies to be installed with the ``ansible-galaxy collection install -r ...`` - command. +build_arg_defaults +****************** + +Default values for build args can be specified in the definition file in +the ``build_arg_defaults`` section as a dictionary. This is an alternative +to using the :ref:`build-arg` CLI flag. - The supplied value may be a relative path from the directory of the execution - environment definition's folder, or an absolute path. +Build args used by ``ansible-builder`` are the following: -``python`` - This string value is the path to a file containing the Python dependencies - to be installed with the ``pip install -r ...`` command. + ``ANSIBLE_GALAXY_CLI_COLLECTION_OPTS`` + This allows the user to pass the `--pre` flag (or others) to enable the installation of pre-release collections. - The supplied value may be a relative path from the directory of the execution - environment definition's folder, or an absolute path. 
+ ``ANSIBLE_GALAXY_CLI_ROLE_OPTS`` + This allows the user to pass any flags, such as `--no-deps`, to the role installation. -``system`` - This string value is points to a - `bindep `__ - requirements file. This will be processed by ``bindep`` and then passed - to ``dnf``, other platforms are not yet supported. +Values given inside of ``build_arg_defaults`` will be hard-coded into the +Containerfile, so they will persist if ``podman build`` is called manually. -additional_build_steps -^^^^^^^^^^^^^^^^^^^^^^ +If the same variable is specified in the CLI :ref:`build-arg` flag, +the CLI value will take higher precedence. -Additional commands may be specified in the ``additional_build_steps`` -section, either for before the main build steps (``prepend``) or after -(``append``). The syntax needs to be one of the following: +dependencies +************ -- a multi-line string (example shown in the ``prepend`` section above) -- a list (as shown via ``append``) +This section allows you to describe any dependencies that will need to be +installed into the final image. -.. _version-2: +The following keys are valid for this section: -Version 2 Format ----------------- + ``ansible_core`` + The version of the Ansible python package to be installed. This value is + passed directly to `pip` for installation and can be in any format that + pip supports. Below are some example values: -With the version 2 format, an execution environment definition may specify -a base and builder container image whose signature must be validated before -builder will build the resulting image, based on the value of the -:ref:`container-policy` CLI option. + .. code:: yaml -.. note:: + ansible_core: ansible-core + ansible_core: ansible-core==2.14.3 + ansible_core: https://github.com/example_user/ansible/archive/refs/heads/ansible.tar.gz + + ``ansible_runner`` + The version of the Ansible Runner python package to be installed. 
This value is + passed directly to `pip` for installation and can be in any format that + pip supports. Below are some example values: + + .. code:: yaml + + ansible_runner: ansible-runner + ansible_runner: ansible-runner==2.3.2 + ansible_runner: https://github.com/example_user/ansible-runner/archive/refs/heads/ansible-runner.tar.gz - Although builder will create a `policy.json` file (see below) to control Podman image - validation, it is up to the user to properly configure the Podman runtime to - talk to the registries needed. This may include defining the sigstore for each - registry, using secure connections (or not), etc. Such configuration is beyond - the scope of this document. + ``galaxy`` + Galaxy installation requirements. This may either be a filename, or a string + representation of the file contents (see below for an example). -This format is identical to the :ref:`version 1 format `, except for -the following changes: + ``python`` + The Python installation requirements. This may either be a filename, or a + list of requirements (see below for an example). -1. A new ``images`` key is added that supports more complex definitions of the - base and builder images. -2. Defining ``EE_BASE_IMAGE`` or ``EE_BUILDER_IMAGE`` in the ``build_args_defaults`` - section, or with the :ref:`build-arg` CLI option, is no longer allowed. + ``python_interpreter`` + A dictionary that defines the Python system package name to be installed by + dnf (``package_name``) and/or a path to the Python interpreter to be used + (``python_path``). -An example version 2 execution environment definition schema is as follows: + ``system`` + The system requirements to be installed in bindep format. This may either + be a filename, or a list of requirements (see below for an example). + +The following example uses filenames that contain the various dependencies: .. 
code:: yaml - --- - version: 2 + dependencies: + python: requirements.txt + system: bindep.txt + galaxy: requirements.yml + ansible_core: ansible-core==2.14.2 + ansible_runner: ansible-runner==2.3.1 + python_interpreter: + package_name: "python310" + python_path: "/usr/bin/python3.10" - build_arg_defaults: - ANSIBLE_GALAXY_CLI_COLLECTION_OPTS: '--pre' +And this example uses inline values: - ansible_config: 'ansible.cfg' +.. code:: yaml dependencies: - galaxy: requirements.yml - python: requirements.txt - system: bindep.txt + python: + - pywinrm + system: + - iputils [platform:rpm] + galaxy: | + collections: + - community.windows + - ansible.utils + ansible_core: ansible-core==2.14.2 + ansible_runner: ansible-runner==2.3.1 + python_interpreter: + package_name: "python310" + python_path: "/usr/bin/python3.10" - images: - base_image: - name: registry.redhat.io/ansible-automation-platform-21/ee-minimal-rhel8:latest - builder_image: - name: my-mirror.example.com/aap-mirror/ansible-builder-rhel8:latest - signature_original_name: registry.redhat.io/ansible-automation-platform-21/ansible-builder-rhel8:latest +.. note:: + + The ``|`` symbol is a YAML operator that allows you to define a block of text + that may contain newline characters as a literal string. Because the ``galaxy`` + requirements content is expressed in YAML, we need this value to be a string + of YAML so that we can pass it along to ``ansible-galaxy``. images -^^^^^^ +****** -This section is a dictionary that is used to define the base and builder images. -How this data is used in relation to a Podman +This section is a dictionary that is used to define the base image to be used. +Verification of signed container images is supported with the ``podman`` container +runtime. How this data is used in relation to a Podman `policy.json `_ file for container image signature validation depends on the value of the :ref:`container-policy` CLI option. 
* ``ignore_all`` policy: Generate a `policy.json` file in the build :ref:`context directory ` where no signature validation is - performed. This duplicates the functionality under the - :ref:`version 1 format`. + performed. * ``system`` policy: Signature validation is performed using pre-existing `policy.json` files in standard system locations. ``ansible-builder`` assumes @@ -200,14 +255,39 @@ file for container image signature validation depends on the value of the Valid keys for this section are: -``base_image`` - A dictionary defining the parent image for the execution environment. A ``name`` - key must be supplied with the container image to use. Use the ``signature_original_name`` - key if the image is mirrored within your repository, but signed with the original - image's signature key. Image names *MUST* contain a tag, such as ``:latest``. - -``builder_image`` - A dictionary defining the image used for compiling type tasks. A ``name`` - key must be supplied with the container image to use. Use the ``signature_original_name`` - key if the image is mirrored within your repository, but signed with the original - image's signature key. Image names *MUST* contain a tag, such as ``:latest``. + ``base_image`` + A dictionary defining the parent image for the execution environment. A ``name`` + key must be supplied with the container image to use. Use the ``signature_original_name`` + key if the image is mirrored within your repository, but signed with the original + image's signature key. Image names *MUST* contain a tag, such as ``:latest``. + +options +******* + +This section is a dictionary that contains keywords/options that can affect +builder runtime functionality. Valid keys for this section are: + + ``package_manager_path`` + A string with the path to the package manager (dnf or microdnf) to use. + The default is ``/usr/bin/dnf``. 
This value will be used to install a + python interpreter, if specified in ``dependencies``, and during the + build phase by the ``assemble`` script. + + ``skip_ansible_check`` + This boolean value controls whether or not the check for an installation + of Ansible and Ansible Runner is performed on the final image. Set this + value to ``True`` to not perform this check. The default is ``False``. + +Example ``options`` section: + +.. code:: yaml + + options: + package_manager_path: /usr/bin/microdnf + skip_ansible_check: True + +version +******* + +This is an integer value that sets the version of the format being used. This +must be ``3`` for the v3 version. diff --git a/docs/usage.rst b/docs/usage.rst index d3376063..f9ada6e1 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -215,6 +215,24 @@ To remove unused images created after the build process: already exists or created by ansible-builder build process. +``--squash`` +************ + +This option controls the final image layer squashing. Valid values are: + +* ``new``: Squash all of the final image's new layers into a single new layer + (preexisting layers are not squashed). This is the default. +* ``all``: Squash all of the final image's layers, including those inherited + from the base image, into a single new layer. +* ``off``: Turn off layer squashing. + +.. note:: + + This flag is compatible only with the ``podman`` runtime and will be ignored + for any other runtime. Docker is not supported since layer image squashing is + considered an experimental feature. 
+ + The ``create`` command ---------------------- diff --git a/requirements.txt b/requirements.txt index b3708cc2..46eb12fe 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ PyYAML requirements-parser bindep +jsonschema diff --git a/setup.cfg b/setup.cfg index d9a4a9f7..28c02eb5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -17,7 +17,6 @@ classifiers = License :: OSI Approved :: Apache Software License Natural Language :: English Operating System :: POSIX - Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 diff --git a/setup.py b/setup.py index b3bbfdbe..4753d25f 100644 --- a/setup.py +++ b/setup.py @@ -5,5 +5,5 @@ setup( setup_requires=['pbr'], pbr=True, - python_requires=">=3.8", + python_requires=">=3.9", ) diff --git a/test/data/ansible-test.cfg b/test/data/ansible_cfg_for_galaxy/ansible-test.cfg similarity index 100% rename from test/data/ansible-test.cfg rename to test/data/ansible_cfg_for_galaxy/ansible-test.cfg diff --git a/test/data/ansible_cfg_for_galaxy/requirements.yml b/test/data/ansible_cfg_for_galaxy/requirements.yml new file mode 100644 index 00000000..e69de29b diff --git a/test/data/foo/requirements.yml b/test/data/nested_galaxy_file/foo/requirements.yml similarity index 100% rename from test/data/foo/requirements.yml rename to test/data/nested_galaxy_file/foo/requirements.yml diff --git a/test/data/nested-galaxy.yml b/test/data/nested_galaxy_file/nested-galaxy.yml similarity index 100% rename from test/data/nested-galaxy.yml rename to test/data/nested_galaxy_file/nested-galaxy.yml diff --git a/test/data/v3/check_ansible/ee-missing-ansible.yml b/test/data/v3/check_ansible/ee-missing-ansible.yml new file mode 100644 index 00000000..de672e87 --- /dev/null +++ b/test/data/v3/check_ansible/ee-missing-ansible.yml @@ -0,0 +1,14 @@ +--- +version: 3 + +images: + base_image: + name: localhost:8080/testrepo/ubi-minimal:latest + 
+dependencies: + ansible_runner: ansible_runner + python_interpreter: + package_name: python3 + +options: + package_manager_path: /usr/bin/microdnf diff --git a/test/data/v3/check_ansible/ee-missing-runner.yml b/test/data/v3/check_ansible/ee-missing-runner.yml new file mode 100644 index 00000000..95e9486c --- /dev/null +++ b/test/data/v3/check_ansible/ee-missing-runner.yml @@ -0,0 +1,14 @@ +--- +version: 3 + +images: + base_image: + name: localhost:8080/testrepo/ubi-minimal:latest + +dependencies: + ansible_core: ansible_core + python_interpreter: + package_name: python3 + +options: + package_manager_path: /usr/bin/microdnf diff --git a/test/data/v3/check_ansible/ee-skip.yml b/test/data/v3/check_ansible/ee-skip.yml new file mode 100644 index 00000000..7f11e426 --- /dev/null +++ b/test/data/v3/check_ansible/ee-skip.yml @@ -0,0 +1,14 @@ +--- +version: 3 + +images: + base_image: + name: localhost:8080/testrepo/ubi-minimal:latest + +dependencies: + python_interpreter: + package_name: python3 + +options: + skip_ansible_check: True + package_manager_path: /usr/bin/microdnf diff --git a/test/data/v3/complete/ee.yml b/test/data/v3/complete/ee.yml new file mode 100644 index 00000000..e9f1688b --- /dev/null +++ b/test/data/v3/complete/ee.yml @@ -0,0 +1,32 @@ +--- +version: 3 + +images: + base_image: + name: registry.redhat.io/ansible-automation-platform-21/ee-minimal-rhel8:latest + +build_arg_defaults: + ANSIBLE_GALAXY_CLI_COLLECTION_OPTS: '--foo' + ANSIBLE_GALAXY_CLI_ROLE_OPTS: '--bar' + +dependencies: + ansible_core: ansible-core==2.13 + ansible_runner: ansible-runner==2.3.1 + python_interpreter: + package_name: "mypython3" + python_path: "/usr/local/bin/mypython" + galaxy: | + collections: + - ansible.utils + python: + - six + - PyYaml==6.0 + system: + - python311 + - mysql + +additional_build_files: + - src: files/random.cfg + dest: myconfigs + - src: files/data + dest: mydata diff --git a/test/data/v3/complete/files/data/a.dat b/test/data/v3/complete/files/data/a.dat 
new file mode 100644 index 00000000..e69de29b diff --git a/test/data/v3/complete/files/data/text_files/a.txt b/test/data/v3/complete/files/data/text_files/a.txt new file mode 100644 index 00000000..e69de29b diff --git a/test/data/v3/complete/files/random.cfg b/test/data/v3/complete/files/random.cfg new file mode 100644 index 00000000..e69de29b diff --git a/test/data/v3/pre_and_post/ee.yml b/test/data/v3/pre_and_post/ee.yml new file mode 100644 index 00000000..e4cfe431 --- /dev/null +++ b/test/data/v3/pre_and_post/ee.yml @@ -0,0 +1,25 @@ +--- +version: 3 + +dependencies: + galaxy: requirements.yml + +additional_build_steps: + prepend_base: | + ARG PRE_BASE1 + ARG PRE_BASE2 + append_base: + - ARG POST_BASE1 + - ARG POST_BASE2 + prepend_galaxy: + - ARG PRE_GALAXY + append_galaxy: + - ARG POST_GALAXY + prepend_builder: + - ARG PRE_BUILDER + append_builder: + - ARG POST_BUILDER + prepend_final: + - ARG PRE_FINAL + append_final: + - ARG POST_FINAL diff --git a/test/data/v3/pre_and_post/requirements.yml b/test/data/v3/pre_and_post/requirements.yml new file mode 100644 index 00000000..e69de29b diff --git a/test/integration/test_build.py b/test/integration/test_build.py index 84711cc0..2f2bd399 100644 --- a/test/integration/test_build.py +++ b/test/integration/test_build.py @@ -30,7 +30,7 @@ def test_blank_execution_environment(cli, runtime, ee_tag, tmp_path, data_dir): bc = tmp_path ee_def = data_dir / 'blank' / 'execution-environment.yml' cli( - f'ansible-builder build -c {bc} -f {ee_def} -t {ee_tag} --container-runtime {runtime}' + f'ansible-builder build --no-cache -c {bc} -f {ee_def} -t {ee_tag} --container-runtime {runtime}' ) result = cli(f'{runtime} run --rm {ee_tag} echo "This is a simple test"') assert 'This is a simple test' in result.stdout, result.stdout @@ -42,7 +42,7 @@ def test_multiple_tags(cli, runtime, ee_tag, tmp_path, data_dir): bc = tmp_path ee_def = data_dir / 'blank' / 'execution-environment.yml' cli( - f'ansible-builder build -c {bc} -f {ee_def} -t 
{ee_tag} -t testmultitags --container-runtime {runtime}' + f'ansible-builder build --no-cache -c {bc} -f {ee_def} -t {ee_tag} -t testmultitags --container-runtime {runtime}' ) result = cli(f'{runtime} run --rm {ee_tag} echo "test: test_multiple_tags 1"') assert 'test: test_multiple_tags 1' in result.stdout, result.stdout @@ -56,7 +56,7 @@ def test_multiple_tags(cli, runtime, ee_tag, tmp_path, data_dir): def test_user_system_requirement(cli, runtime, ee_tag, tmp_path, data_dir): bc = tmp_path ee_def = data_dir / 'subversion' / 'execution-environment.yml' - command = f'ansible-builder build -c {bc} -f {ee_def} -t {ee_tag} --container-runtime {runtime}' + command = f'ansible-builder build --no-cache -c {bc} -f {ee_def} -t {ee_tag} --container-runtime {runtime}' cli(command) result = cli( f'{runtime} run --rm {ee_tag} svn --help' @@ -69,7 +69,7 @@ def test_collection_system_requirement(cli, runtime, ee_tag, tmp_path, data_dir) bc = tmp_path ee_def = data_dir / 'ansible.posix.at' / 'execution-environment.yml' cli( - f'ansible-builder build -c {bc} -f {ee_def} -t {ee_tag} --container-runtime {runtime} -v3' + f'ansible-builder build --no-cache -c {bc} -f {ee_def} -t {ee_tag} --container-runtime {runtime} -v3' ) result = cli( f'{runtime} run --rm {ee_tag} at -V' @@ -81,7 +81,7 @@ def test_collection_system_requirement(cli, runtime, ee_tag, tmp_path, data_dir) def test_user_python_requirement(cli, runtime, ee_tag, tmp_path, data_dir): bc = tmp_path ee_def = data_dir / 'pip' / 'execution-environment.yml' - command = f'ansible-builder build -c {bc} -f {ee_def} -t {ee_tag} --container-runtime {runtime}' + command = f'ansible-builder build --no-cache -c {bc} -f {ee_def} -t {ee_tag} --container-runtime {runtime}' cli(command) result = cli( f'{runtime} run --rm {ee_tag} pip3 show awxkit' @@ -98,7 +98,7 @@ def test_user_python_requirement(cli, runtime, ee_tag, tmp_path, data_dir): def test_python_git_requirement(cli, runtime, ee_tag, tmp_path, data_dir): bc = tmp_path ee_def = 
data_dir / 'needs_git' / 'execution-environment.yml' - command = f'ansible-builder build -c {bc} -f {ee_def} -t {ee_tag} --container-runtime {runtime}' + command = f'ansible-builder build --no-cache -c {bc} -f {ee_def} -t {ee_tag} --container-runtime {runtime}' cli(command) result = cli(f'{runtime} run --rm {ee_tag} pip3 freeze') assert 'flask' in result.stdout.lower(), result.stdout @@ -112,7 +112,7 @@ def test_prepended_steps(cli, runtime, ee_tag, tmp_path, data_dir): bc = tmp_path ee_def = data_dir / 'prepend_steps' / 'execution-environment.yml' cli( - f'ansible-builder build -c {bc} -f {ee_def} -t {ee_tag} --container-runtime {runtime}' + f'ansible-builder build --no-cache -c {bc} -f {ee_def} -t {ee_tag} --container-runtime {runtime}' ) _file = 'Dockerfile' if runtime == 'docker' else 'Containerfile' @@ -128,7 +128,7 @@ def test_build_args_basic(cli, runtime, ee_tag, tmp_path, data_dir): bc = tmp_path ee_def = data_dir / 'build_args' / 'execution-environment.yml' result = cli( - f'ansible-builder build -c {bc} -f {ee_def} -t {ee_tag} --container-runtime {runtime} --build-arg FOO=bar -v3' + f'ansible-builder build --no-cache -c {bc} -f {ee_def} -t {ee_tag} --container-runtime {runtime} --build-arg FOO=bar -v3' ) assert 'FOO=bar' in result.stdout @@ -142,7 +142,7 @@ def test_build_args_from_environment(cli, runtime, ee_tag, tmp_path, data_dir): ee_def = data_dir / 'build_args' / 'execution-environment.yml' os.environ['FOO'] = 'secretsecret' result = cli( - f'ansible-builder build -c {bc} -f {ee_def} -t {ee_tag} --container-runtime {runtime} --build-arg FOO -v3' + f'ansible-builder build --no-cache -c {bc} -f {ee_def} -t {ee_tag} --container-runtime {runtime} --build-arg FOO -v3' ) assert 'secretsecret' in result.stdout @@ -154,10 +154,11 @@ def test_base_image_build_arg(cli, runtime, ee_tag, tmp_path, data_dir): os.environ['FOO'] = 'secretsecret' # Build with custom image tag, then use that as input to --build-arg EE_BASE_IMAGE - cli(f'ansible-builder build -c 
{bc} -f {ee_def} -t {ee_tag}-custom --container-runtime {runtime} -v3') - cli(f'ansible-builder build -c {bc} -f {ee_def} -t {ee_tag}-custom ' + cli(f'ansible-builder build --no-cache -c {bc} -f {ee_def} -t {ee_tag}-custom --container-runtime {runtime} -v3') + cli(f'ansible-builder build --no-cache -c {bc} -f {ee_def} -t {ee_tag}-custom ' f'--container-runtime {runtime} --build-arg EE_BASE_IMAGE={ee_tag}-custom -v3') - result = cli(f"{runtime} run {ee_tag}-custom cat /base_image") + + result = cli(f"{runtime} run --rm {ee_tag}-custom cat /base_image") assert f"{ee_tag}-custom" in result.stdout @@ -165,7 +166,7 @@ def test_base_image_build_arg(cli, runtime, ee_tag, tmp_path, data_dir): @pytest.mark.xfail(reason='Unreliable on podman') def test_has_pytz(cli, runtime, data_dir, ee_tag, tmp_path): ee_def = data_dir / 'pytz' / 'execution-environment.yml' - cli(f'ansible-builder build -c {tmp_path} -f {ee_def} -t {ee_tag} --container-runtime {runtime} -v 3') + cli(f'ansible-builder build --no-cache -c {tmp_path} -f {ee_def} -t {ee_tag} --container-runtime {runtime} -v 3') result = cli(f'{runtime} run --rm {ee_tag} pip3 show pytz') assert 'World timezone definitions, modern and historical' in result.stdout @@ -197,7 +198,7 @@ def test_collection_verification_off(cli, runtime, data_dir, ee_tag, tmp_path): Test that, by default, collection verification is off via the env var. 
""" ee_def = data_dir / 'ansible.posix.at' / 'execution-environment.yml' - result = cli(f'ansible-builder build -c {tmp_path} -f {ee_def} -t {ee_tag} --container-runtime {runtime} -v 3') + result = cli(f'ansible-builder build --no-cache -c {tmp_path} -f {ee_def} -t {ee_tag} --container-runtime {runtime} -v 3') assert "RUN ANSIBLE_GALAXY_DISABLE_GPG_VERIFY=1 ansible-galaxy" in result.stdout @@ -211,7 +212,8 @@ def test_collection_verification_on(cli, runtime, data_dir, ee_tag, tmp_path): ee_def = data_dir / 'ansible.posix.at' / 'execution-environment.yml' # ansible-galaxy might error (older Ansible), but that should be ok - result = cli(f'ansible-builder build --galaxy-keyring {keyring} -c {tmp_path} -f {ee_def} -t {ee_tag} --container-runtime {runtime} -v 3', allow_error=True) + result = cli(f'ansible-builder build --no-cache --galaxy-keyring {keyring} -c {tmp_path} -f {ee_def} -t {ee_tag} --container-runtime {runtime} -v 3', + allow_error=True) keyring_copy = tmp_path / constants.user_content_subfolder / constants.default_keyring_name assert keyring_copy.exists() @@ -224,7 +226,7 @@ def test_collection_verification_on(cli, runtime, data_dir, ee_tag, tmp_path): @pytest.mark.test_all_runtimes def test_galaxy_signing_extra_args(cli, runtime, data_dir, ee_tag, tmp_path): """ - Test that all extr asigning args for gpg are passed into the container file. + Test that all extra signing args for gpg are passed into the container file. 
""" pytest.xfail("failing configuration (but should work)") @@ -232,7 +234,7 @@ def test_galaxy_signing_extra_args(cli, runtime, data_dir, ee_tag, tmp_path): keyring.touch() ee_def = data_dir / 'ansible.posix.at' / 'execution-environment.yml' - result = cli(f'ansible-builder build -c {tmp_path} -f {ee_def} -t {ee_tag} --container-runtime {runtime} -v 3 ' + result = cli(f'ansible-builder build --no-cache -c {tmp_path} -f {ee_def} -t {ee_tag} --container-runtime {runtime} -v 3 ' f'--galaxy-keyring {keyring} --galaxy-ignore-signature-status-code 500 ' f'--galaxy-required-valid-signature-count 3', allow_error=True) diff --git a/test/integration/test_create.py b/test/integration/test_create.py index ee40ca90..c7d2291c 100644 --- a/test/integration/test_create.py +++ b/test/integration/test_create.py @@ -108,7 +108,7 @@ def test_collection_verification_on(cli, build_dir_and_ee_yml): def test_galaxy_signing_extra_args(cli, build_dir_and_ee_yml): """ - Test that all extr asigning args for gpg are passed into the container file. + Test that all extra signing args for gpg are passed into the container file. 
""" ee = [ 'dependencies:', @@ -148,8 +148,8 @@ def test_v2_default_images(cli, build_dir_and_ee_yml): assert containerfile.exists() text = containerfile.read_text() - assert "ARG EE_BASE_IMAGE=quay.io/ansible/ansible-runner:latest" in text - assert "ARG EE_BUILDER_IMAGE=quay.io/ansible/ansible-builder:latest" in text + assert 'ARG EE_BASE_IMAGE="quay.io/ansible/ansible-runner:latest"' in text + assert 'ARG EE_BUILDER_IMAGE="quay.io/ansible/ansible-builder:latest"' in text def test_v2_default_base_image(cli, build_dir_and_ee_yml): @@ -170,8 +170,8 @@ def test_v2_default_base_image(cli, build_dir_and_ee_yml): assert containerfile.exists() text = containerfile.read_text() - assert "ARG EE_BASE_IMAGE=quay.io/ansible/ansible-runner:latest" in text - assert "ARG EE_BUILDER_IMAGE=quay.io/ansible/awx-ee:latest" in text + assert 'ARG EE_BASE_IMAGE="quay.io/ansible/ansible-runner:latest"' in text + assert 'ARG EE_BUILDER_IMAGE="quay.io/ansible/awx-ee:latest"' in text def test_v2_default_builder_image(cli, build_dir_and_ee_yml): @@ -192,5 +192,140 @@ def test_v2_default_builder_image(cli, build_dir_and_ee_yml): assert containerfile.exists() text = containerfile.read_text() - assert "ARG EE_BASE_IMAGE=quay.io/ansible/awx-ee:latest" in text - assert "ARG EE_BUILDER_IMAGE=quay.io/ansible/ansible-builder:latest" in text + assert 'ARG EE_BASE_IMAGE="quay.io/ansible/awx-ee:latest"' in text + assert 'ARG EE_BUILDER_IMAGE="quay.io/ansible/ansible-builder:latest"' in text + + +def test_v3_pre_post_commands(cli, data_dir, tmp_path): + """Test that the pre/post commands are inserted""" + ee_def = data_dir / 'v3' / 'pre_and_post' / 'ee.yml' + r = cli(f'ansible-builder create -c {str(tmp_path)} -f {ee_def}') + assert r.rc == 0 + + containerfile = tmp_path / "Containerfile" + assert containerfile.exists() + text = containerfile.read_text() + + assert "ARG PRE_BASE1\n" in text + assert "ARG PRE_BASE2\n" in text + assert "ARG POST_BASE1\n" in text + assert "ARG POST_BASE2\n" in text + 
 assert "ARG PRE_GALAXY" in text + assert "ARG POST_GALAXY" in text + assert "ARG PRE_BUILDER" in text + assert "ARG POST_BUILDER" in text + assert "ARG PRE_FINAL" in text + assert "ARG POST_FINAL" in text + + +def test_v3_complete(cli, data_dir, tmp_path): + """For testing various elements in a complete v3 EE file""" + ee_def = data_dir / 'v3' / 'complete' / 'ee.yml' + r = cli(f'ansible-builder create -c {str(tmp_path)} -f {ee_def}') + assert r.rc == 0 + + containerfile = tmp_path / "Containerfile" + assert containerfile.exists() + text = containerfile.read_text() + + assert 'ARG EE_BASE_IMAGE="registry.redhat.io/ansible-automation-platform-21/ee-minimal-rhel8:latest"\n' in text + assert 'ARG EE_BUILDER_IMAGE' not in text + assert 'ARG PYCMD="/usr/local/bin/mypython"\n' in text + assert 'ARG PYPKG="mypython3"\n' in text + assert 'ARG ANSIBLE_GALAXY_CLI_COLLECTION_OPTS="--foo"\n' in text + assert 'ARG ANSIBLE_GALAXY_CLI_ROLE_OPTS="--bar"\n' in text + assert 'ARG ANSIBLE_INSTALL_REFS="ansible-core==2.13 ansible-runner==2.3.1"\n' in text + + # verify that the ansible-galaxy command check is performed + assert 'RUN /output/scripts/check_galaxy' in text + + # verify that the ansible/runner check is performed + assert 'RUN /output/scripts/check_ansible' in text + + # verify that the default init is being installed and that ENTRYPOINT is set + assert "RUN $PYCMD -m pip install --no-cache-dir 'dumb-init==" in text + assert 'ENTRYPOINT ["dumb-init"]' in text + + # check additional_build_files + myconfigs_path = tmp_path / constants.user_content_subfolder / "myconfigs" + assert myconfigs_path.is_dir() + random_file = myconfigs_path / "random.cfg" + assert random_file.exists() + + # Tree structure we expect: + # ├── mydata + # │   ├── a.dat + # │   └── text_files + # │   ├── a.txt + + mydata_path = tmp_path / constants.user_content_subfolder / "mydata" + assert mydata_path.is_dir() + dat_file = mydata_path / "a.dat" + assert dat_file.exists() + text_files = mydata_path / 
"text_files" + assert text_files.is_dir() + a_text = text_files / "a.txt" + assert a_text.exists() + + +def test_v3_skip_ansible_check(cli, build_dir_and_ee_yml): + """ + Test 'options.skip_ansible_check' works. + """ + ee = [ + 'version: 3', + 'options:', + ' skip_ansible_check: True', + ] + + tmpdir, eeyml = build_dir_and_ee_yml("\n".join(ee)) + cli(f'ansible-builder create -c {tmpdir} -f {eeyml} --output-filename Containerfile') + + containerfile = tmpdir / "Containerfile" + assert containerfile.exists() + text = containerfile.read_text() + + assert "check_ansible" not in text + + +def test_v3_skip_container_init(cli, build_dir_and_ee_yml): + tmpdir, eeyml = build_dir_and_ee_yml( + """ + version: 3 + options: + container_init: {} + """ + ) + cli(f'ansible-builder create -c {tmpdir} -f {eeyml} --output-filename Containerfile') + + containerfile = tmpdir / "Containerfile" + assert containerfile.exists() + text = containerfile.read_text() + + assert "dumb-init" not in text + assert "ENTRYPOINT" not in text + assert 'CMD ["bash"]' not in text + + +def test_v3_custom_container_init(cli, build_dir_and_ee_yml): + tmpdir, eeyml = build_dir_and_ee_yml( + """ + version: 3 + options: + container_init: + package_pip: custominit==1.2.3 + entrypoint: | + ["custominit"] + cmd: | + ["customcmd"] + """ + ) + cli(f'ansible-builder create -c {tmpdir} -f {eeyml} --output-filename Containerfile') + + containerfile = tmpdir / "Containerfile" + assert containerfile.exists() + text = containerfile.read_text() + + assert "pip install --no-cache-dir 'custominit==1.2.3'" in text + assert 'ENTRYPOINT ["custominit"]' in text + assert 'CMD ["customcmd"]' in text diff --git a/test/pulp_integration/test_v3.py b/test/pulp_integration/test_v3.py new file mode 100644 index 00000000..f5ca14c4 --- /dev/null +++ b/test/pulp_integration/test_v3.py @@ -0,0 +1,48 @@ +import pytest +import subprocess + + +class TestV3: + + def test_ansible_check_is_skipped(self, cli, tmp_path, data_dir, podman_ee_tag): 
+        """ + Test that skipping the check_ansible script will NOT cause build failure. + """ + ee_def = data_dir / 'v3' / 'check_ansible' / 'ee-skip.yml' + + result = cli( + f'ansible-builder build -c {tmp_path} -f {ee_def} -t {podman_ee_tag} ' + f'--container-runtime=podman -v3' + ) + + assert result.rc == 0 + + def test_missing_ansible(self, cli, tmp_path, data_dir, podman_ee_tag): + """ + Test that the check_ansible script will cause build failure if + ansible-core is not installed. + """ + ee_def = data_dir / 'v3' / 'check_ansible' / 'ee-missing-ansible.yml' + + with pytest.raises(subprocess.CalledProcessError) as einfo: + cli( + f'ansible-builder build -c {tmp_path} -f {ee_def} -t {podman_ee_tag} ' + f'--container-runtime=podman -v3' + ) + + assert "ERROR - Missing Ansible installation" in einfo.value.stdout + + def test_missing_runner(self, cli, tmp_path, data_dir, podman_ee_tag): + """ + Test that the check_ansible script will cause build failure if + ansible-runner is not installed.
+ """ + ee_def = data_dir / 'v3' / 'check_ansible' / 'ee-missing-runner.yml' + + with pytest.raises(subprocess.CalledProcessError) as einfo: + cli( + f'ansible-builder build -c {tmp_path} -f {ee_def} -t {podman_ee_tag} ' + f'--container-runtime=podman -v3' + ) + + assert "ERROR - Missing Ansible Runner installation" in einfo.value.stdout diff --git a/test/requirements.txt b/test/requirements.txt index cc4c9ee8..5782af96 100644 --- a/test/requirements.txt +++ b/test/requirements.txt @@ -1,8 +1,11 @@ coverage flake8 +mypy==1.0.1 pytest pytest-cov pytest-mock pytest-xdist +types-jsonschema +types-pyyaml tox yamllint diff --git a/test/unit/test_cli.py b/test/unit/test_cli.py index f0e6f759..99655c62 100644 --- a/test/unit/test_cli.py +++ b/test/unit/test_cli.py @@ -214,3 +214,63 @@ def test_container_policy_with_version_1(exec_env_definition_file, tmp_path): '--container-runtime', 'podman', '--container-keyring', 'TBD', ]) + + +def test_squash_default(exec_env_definition_file, tmp_path): + ''' + Test the squash CLI option with default. + ''' + content = {'version': 2} + path = str(exec_env_definition_file(content=content)) + aee = prepare(['build', + '-f', path, + '-c', str(tmp_path), + '--container-runtime', 'podman', + ]) + assert '--squash' in aee.build_command + assert '--squash-all' not in aee.build_command + + +def test_squash_all(exec_env_definition_file, tmp_path): + ''' + Test the squash CLI option with 'all'. + ''' + content = {'version': 2} + path = str(exec_env_definition_file(content=content)) + aee = prepare(['build', + '-f', path, + '-c', str(tmp_path), + '--container-runtime', 'podman', + '--squash', 'all' + ]) + assert '--squash-all' in aee.build_command + + +def test_squash_off(exec_env_definition_file, tmp_path): + ''' + Test the squash CLI option with 'off'. 
+ ''' + content = {'version': 2} + path = str(exec_env_definition_file(content=content)) + aee = prepare(['build', + '-f', path, + '-c', str(tmp_path), + '--container-runtime', 'podman', + '--squash', 'off' + ]) + assert '--squash' not in aee.build_command + + +def test_squash_ignored(exec_env_definition_file, tmp_path): + ''' + Test the squash CLI option is ignored with docker. + ''' + content = {'version': 2} + path = str(exec_env_definition_file(content=content)) + aee = prepare(['build', + '-f', path, + '-c', str(tmp_path), + '--container-runtime', 'docker', + '--squash', 'all' + ]) + assert '--squash' not in aee.build_command diff --git a/test/unit/test_containerfile.py b/test/unit/test_containerfile.py new file mode 100644 index 00000000..d68eb21a --- /dev/null +++ b/test/unit/test_containerfile.py @@ -0,0 +1,110 @@ +from ansible_builder import constants +from ansible_builder.containerfile import Containerfile +from ansible_builder.user_definition import UserDefinition + + +def make_containerfile(tmpdir, ee_path, **cf_kwargs): + definition = UserDefinition(ee_path) + build_context = str(tmpdir / '_build') + c = Containerfile(definition, build_context=build_context, container_runtime='podman', **cf_kwargs) + return c + + +def test_insert_custom_steps_list(build_dir_and_ee_yml): + ee_data = [ + 'additional_build_steps:', + ' prepend:', + ' - RUN echo This is the custom steps list test', + ' - RUN whoami', + ] + + tmpdir, ee_path = build_dir_and_ee_yml("\n".join(ee_data)) + c = make_containerfile(tmpdir, ee_path) + c._insert_custom_steps("prepend") + assert c.steps == ['RUN echo This is the custom steps list test', 'RUN whoami'] + + +def test_insert_custom_steps_string(build_dir_and_ee_yml): + ee_data = [ + 'additional_build_steps:', + ' append: |', + ' RUN echo This is the custom steps string test', + ' RUN whoami', + ] + + tmpdir, ee_path = build_dir_and_ee_yml("\n".join(ee_data)) + c = make_containerfile(tmpdir, ee_path) + c._insert_custom_steps("append") + 
assert c.steps == ['RUN echo This is the custom steps string test', 'RUN whoami'] + + +def test_prepare_galaxy_install_steps(build_dir_and_ee_yml): + ee_data = [ + 'dependencies:', + ' galaxy: requirements.yml', + ] + tmpdir, ee_path = build_dir_and_ee_yml("\n".join(ee_data)) + c = make_containerfile(tmpdir, ee_path) + c._prepare_galaxy_install_steps() + expected = [ + f"RUN ansible-galaxy role install $ANSIBLE_GALAXY_CLI_ROLE_OPTS -r {constants.CONTEXT_FILES['galaxy']} --roles-path \"{constants.base_roles_path}\"", + f"RUN ANSIBLE_GALAXY_DISABLE_GPG_VERIFY=1 ansible-galaxy collection install " + f"$ANSIBLE_GALAXY_CLI_COLLECTION_OPTS -r {constants.CONTEXT_FILES['galaxy']} --collections-path \"{constants.base_collections_path}\"" + ] + assert c.steps == expected + + +def test_prepare_galaxy_install_steps_with_keyring(build_dir_and_ee_yml): + ee_data = [ + 'dependencies:', + ' galaxy: requirements.yml', + ] + tmpdir, ee_path = build_dir_and_ee_yml("\n".join(ee_data)) + c = make_containerfile(tmpdir, ee_path, galaxy_keyring=constants.default_keyring_name) + c._prepare_galaxy_install_steps() + expected = [ + f"RUN ansible-galaxy role install $ANSIBLE_GALAXY_CLI_ROLE_OPTS -r {constants.CONTEXT_FILES['galaxy']} --roles-path \"{constants.base_roles_path}\"", + f"RUN ansible-galaxy collection install $ANSIBLE_GALAXY_CLI_COLLECTION_OPTS -r {constants.CONTEXT_FILES['galaxy']} " + f"--collections-path \"{constants.base_collections_path}\" --keyring \"{constants.default_keyring_name}\"" + ] + assert c.steps == expected + + +def test_prepare_galaxy_install_steps_with_sigcount(build_dir_and_ee_yml): + sig_count = 3 + ee_data = [ + 'dependencies:', + ' galaxy: requirements.yml', + ] + tmpdir, ee_path = build_dir_and_ee_yml("\n".join(ee_data)) + c = make_containerfile(tmpdir, ee_path, + galaxy_keyring=constants.default_keyring_name, + galaxy_required_valid_signature_count=sig_count) + c._prepare_galaxy_install_steps() + expected = [ + f"RUN ansible-galaxy role install 
$ANSIBLE_GALAXY_CLI_ROLE_OPTS -r {constants.CONTEXT_FILES['galaxy']} --roles-path \"{constants.base_roles_path}\"", + f"RUN ansible-galaxy collection install $ANSIBLE_GALAXY_CLI_COLLECTION_OPTS -r {constants.CONTEXT_FILES['galaxy']} " + f"--collections-path \"{constants.base_collections_path}\" --required-valid-signature-count {sig_count} --keyring \"{constants.default_keyring_name}\"" + ] + assert c.steps == expected + + +def test_prepare_galaxy_install_steps_with_ignore_code(build_dir_and_ee_yml): + codes = [1, 2] + ee_data = [ + 'dependencies:', + ' galaxy: requirements.yml', + ] + tmpdir, ee_path = build_dir_and_ee_yml("\n".join(ee_data)) + c = make_containerfile(tmpdir, ee_path, + galaxy_keyring=constants.default_keyring_name, + galaxy_ignore_signature_status_codes=codes) + c._prepare_galaxy_install_steps() + expected = [ + f"RUN ansible-galaxy role install $ANSIBLE_GALAXY_CLI_ROLE_OPTS -r {constants.CONTEXT_FILES['galaxy']} --roles-path \"{constants.base_roles_path}\"", + f"RUN ansible-galaxy collection install $ANSIBLE_GALAXY_CLI_COLLECTION_OPTS -r {constants.CONTEXT_FILES['galaxy']} " + f"--collections-path \"{constants.base_collections_path}\" " + f"--ignore-signature-status-code {codes[0]} --ignore-signature-status-code {codes[1]} " + f"--keyring \"{constants.default_keyring_name}\"" + ] + assert c.steps == expected diff --git a/test/unit/test_introspect.py b/test/unit/test_introspect.py index d950a041..676f2202 100644 --- a/test/unit/test_introspect.py +++ b/test/unit/test_introspect.py @@ -1,6 +1,6 @@ import os -from ansible_builder.introspect import process, process_collection, simple_combine +from ansible_builder._target_scripts.introspect import process, process_collection, simple_combine from ansible_builder.requirements import sanitize_requirements diff --git a/test/unit/test_main.py b/test/unit/test_main.py index 9f58d3de..9382b241 100644 --- a/test/unit/test_main.py +++ b/test/unit/test_main.py @@ -1,6 +1,3 @@ -import os -import pathlib - import 
pytest from ansible_builder import constants @@ -10,13 +7,13 @@ def test_definition_version(exec_env_definition_file): path = exec_env_definition_file(content={'version': 1}) aee = AnsibleBuilder(filename=path) - assert aee.version == '1' + assert aee.version == 1 def test_definition_version_missing(exec_env_definition_file): path = exec_env_definition_file(content={}) aee = AnsibleBuilder(filename=path) - assert aee.version == '1' + assert aee.version == 1 @pytest.mark.parametrize('path_spec', ('absolute', 'relative')) @@ -84,7 +81,7 @@ def test_base_image_via_definition_file_build_arg(exec_env_definition_file, tmp_ with open(aee.containerfile.path) as f: content = f.read() - assert 'EE_BASE_IMAGE=my-other-custom-image' in content + assert 'EE_BASE_IMAGE="my-other-custom-image"' in content @pytest.mark.test_all_runtimes @@ -92,7 +89,7 @@ def test_build_command(exec_env_definition_file, runtime): content = {'version': 1} path = exec_env_definition_file(content=content) - aee = AnsibleBuilder(filename=path, tag='my-custom-image') + aee = AnsibleBuilder(filename=path, tag=['my-custom-image']) command = aee.build_command assert 'build' and 'my-custom-image' in command @@ -100,30 +97,30 @@ def test_build_command(exec_env_definition_file, runtime): command = aee.build_command assert 'foo/bar/path' in command - assert 'foo/bar/path/Dockerfile' in " ".join(command) + fpath = 'foo/bar/path/' + constants.runtime_files[runtime] + assert fpath in " ".join(command) def test_nested_galaxy_file(data_dir, tmp_path): - if not os.path.exists('test/data/nested-galaxy.yml'): - pytest.skip('Test is only valid when ran from ansible-builder root') - - AnsibleBuilder(filename='test/data/nested-galaxy.yml', build_context=tmp_path).build() + nested_galaxy_file = str(data_dir / 'nested_galaxy_file' / 'nested-galaxy.yml') + AnsibleBuilder(filename=nested_galaxy_file, build_context=tmp_path).build() req_in_bc = tmp_path.joinpath(constants.user_content_subfolder, 'requirements.yml') assert 
req_in_bc.exists() - req_original = pathlib.Path('test/data/foo/requirements.yml') + req_original = data_dir / 'nested_galaxy_file' / 'foo' / 'requirements.yml' assert req_in_bc.read_text() == req_original.read_text() -def test_ansible_config_for_galaxy(exec_env_definition_file, tmp_path): - if not os.path.exists('test/data/ansible-test.cfg'): - pytest.skip('Test is only valid when ran from ansible-builder root') - - ansible_config_path = 'test/data/ansible-test.cfg' +def test_ansible_config_for_galaxy(exec_env_definition_file, tmp_path, data_dir): + ansible_config_path = str(data_dir / 'ansible_cfg_for_galaxy' / 'ansible-test.cfg') + galaxy_req = str(data_dir / 'ansible_cfg_for_galaxy' / 'requirements.yml') content = { 'version': 1, - 'ansible_config': ansible_config_path + 'ansible_config': ansible_config_path, + 'dependencies': { + 'galaxy': galaxy_req, + }, } path = exec_env_definition_file(content=content) aee = AnsibleBuilder(filename=path, build_context=tmp_path.joinpath('bc')) diff --git a/test/unit/test_steps.py b/test/unit/test_steps.py deleted file mode 100644 index 5f1c4626..00000000 --- a/test/unit/test_steps.py +++ /dev/null @@ -1,67 +0,0 @@ -import pytest -import textwrap - -from ansible_builder import constants -from ansible_builder.steps import AdditionalBuildSteps, GalaxyInstallSteps - - -@pytest.mark.parametrize('verb', ['prepend', 'append']) -def test_additional_build_steps(verb): - additional_build_steps = { - 'prepend': ["RUN echo This is the prepend test", "RUN whoami"], - 'append': textwrap.dedent(""" - RUN echo This is the append test - RUN whoami - """) - } - steps = AdditionalBuildSteps(additional_build_steps[verb]) - - assert len(list(steps)) == 2 - - -def test_galaxy_install_steps(): - steps = list(GalaxyInstallSteps("requirements.txt", None, [], None)) - expected = [ - f"RUN ansible-galaxy role install $ANSIBLE_GALAXY_CLI_ROLE_OPTS -r requirements.txt --roles-path \"{constants.base_roles_path}\"", - - f"RUN 
ANSIBLE_GALAXY_DISABLE_GPG_VERIFY=1 ansible-galaxy collection install " - f"$ANSIBLE_GALAXY_CLI_COLLECTION_OPTS -r requirements.txt --collections-path \"{constants.base_collections_path}\"" - ] - assert steps == expected - - -def test_galaxy_install_steps_with_keyring(): - steps = list(GalaxyInstallSteps("requirements.txt", constants.default_keyring_name, [], None)) - expected = [ - f"RUN ansible-galaxy role install $ANSIBLE_GALAXY_CLI_ROLE_OPTS -r requirements.txt --roles-path \"{constants.base_roles_path}\"", - - f"RUN ansible-galaxy collection install $ANSIBLE_GALAXY_CLI_COLLECTION_OPTS -r requirements.txt " - f"--collections-path \"{constants.base_collections_path}\" --keyring \"{constants.default_keyring_name}\"" - ] - assert steps == expected - - -def test_galaxy_install_steps_with_sig_count(): - sig_count = 3 - steps = list(GalaxyInstallSteps("requirements.txt", constants.default_keyring_name, [], sig_count)) - expected = [ - f"RUN ansible-galaxy role install $ANSIBLE_GALAXY_CLI_ROLE_OPTS -r requirements.txt --roles-path \"{constants.base_roles_path}\"", - - f"RUN ansible-galaxy collection install $ANSIBLE_GALAXY_CLI_COLLECTION_OPTS -r requirements.txt " - f"--collections-path \"{constants.base_collections_path}\" --required-valid-signature-count {sig_count} " - f"--keyring \"{constants.default_keyring_name}\"" - ] - assert steps == expected - - -def test_galaxy_install_steps_with_ignore_code(): - codes = [1, 2] - steps = list(GalaxyInstallSteps("requirements.txt", constants.default_keyring_name, codes, None)) - expected = [ - f"RUN ansible-galaxy role install $ANSIBLE_GALAXY_CLI_ROLE_OPTS -r requirements.txt --roles-path \"{constants.base_roles_path}\"", - - f"RUN ansible-galaxy collection install $ANSIBLE_GALAXY_CLI_COLLECTION_OPTS -r requirements.txt " - f"--collections-path \"{constants.base_collections_path}\" --ignore-signature-status-code {codes[0]} " - f"--ignore-signature-status-code {codes[1]} --keyring \"{constants.default_keyring_name}\"" - ] - 
assert steps == expected diff --git a/test/unit/test_user_definition.py b/test/unit/test_user_definition.py index 02599718..0c96e4ec 100644 --- a/test/unit/test_user_definition.py +++ b/test/unit/test_user_definition.py @@ -24,46 +24,78 @@ def test_definition_syntax_error(self, data_dir): ), # missing file ( "{'version': 1, 'additional_build_steps': 'RUN me'}", - "Expected 'additional_build_steps' in the provided definition file to be a dictionary\n" - "with keys 'prepend' and/or 'append'; found a str instead." + "'RUN me' is not of type 'object'" ), # not right format for additional_build_steps ( "{'version': 1, 'additional_build_steps': {'middle': 'RUN me'}}", - "Keys ('middle',) are not allowed in 'additional_build_steps'." + "Additional properties are not allowed ('middle' was unexpected)" ), # there are no "middle" build steps ( "{'version': 1, 'build_arg_defaults': {'EE_BASE_IMAGE': ['foo']}}", - "Expected build_arg_defaults.EE_BASE_IMAGE to be a string; Found a instead." + "['foo'] is not of type 'string'" ), # image itself is wrong type ( "{'version': 1, 'build_arg_defaults': {'BUILD_ARRRRRG': 'swashbuckler'}}", - "Keys {'BUILD_ARRRRRG'} are not allowed in 'build_arg_defaults'." + "Additional properties are not allowed ('BUILD_ARRRRRG' was unexpected)" ), # image itself is wrong type ( "{'version': 1, 'ansible_config': ['ansible.cfg']}", - "Expected 'ansible_config' in the provided definition file to\n" - "be a string; found a list instead." + "['ansible.cfg'] is not of type 'string'" ), ( "{'version': 1, 'images': 'bar'}", - "Error: Unknown yaml key(s), {'images'}, found in the definition file." + "Additional properties are not allowed ('images' was unexpected)" ), ( "{'version': 2, 'foo': 'bar'}", - "Error: Unknown yaml key(s), {'foo'}, found in the definition file." 
+ "Additional properties are not allowed ('foo' was unexpected)" ), ( "{'version': 2, 'build_arg_defaults': {'EE_BASE_IMAGE': 'foo'}, 'images': {}}", - "Error: Version 2 does not allow defining EE_BASE_IMAGE or EE_BUILDER_IMAGE in 'build_arg_defaults'" + "Additional properties are not allowed ('EE_BASE_IMAGE' was unexpected)" ), # v1 base image defined in v2 file ( "{'version': 2, 'build_arg_defaults': {'EE_BUILDER_IMAGE': 'foo'}, 'images': {}}", - "Error: Version 2 does not allow defining EE_BASE_IMAGE or EE_BUILDER_IMAGE in 'build_arg_defaults'" + "Additional properties are not allowed ('EE_BUILDER_IMAGE' was unexpected)" ), # v1 builder image defined in v2 file + ( + "{'version': 3, 'additional_build_steps': {'prepend': ''}}", + "Additional properties are not allowed ('prepend' was unexpected)" + ), # 'prepend' is renamed in v2 + ( + "{'version': 3, 'additional_build_files': [ {'src': 'a', 'dest': '../b'} ]}", + "'dest' must not be an absolute path or contain '..': ../b" + ), # destination cannot contain .. 
+ ( + "{'version': 3, 'additional_build_files': [ {'src': 'a', 'dest': '/b'} ]}", + "'dest' must not be an absolute path or contain '..': /b" + ), # destination cannot be absolute + ( + "{'version': 3, 'additional_build_files': [ {'dest': 'b'} ]}", + "'src' is a required property" + ), # source is required + ( + "{'version': 3, 'additional_build_files': [ {'src': 'a'} ]}", + "'dest' is a required property" + ), # destination is required + ( + "{'version': 3, 'ansible_config': 'ansible.cfg' }", + "Additional properties are not allowed ('ansible_config' was unexpected)" + ), # ansible_config not supported in v3 + ( + "{'version': 3, 'images': { 'base_image': {'name': 'base_image:latest'}, 'builder_image': {'name': 'builder_image:latest'} }}", + "Additional properties are not allowed ('builder_image' was unexpected)" + ), # builder_image not suppored in v3 + ( + "{'version': 3, 'options': { 'skip_ansible_check': 'True' } }", + "'True' is not of type 'boolean'" + ), ], ids=[ 'integer', 'missing_file', 'additional_steps_format', 'additional_unknown', 'build_args_value_type', 'unexpected_build_arg', 'config_type', 'v1_contains_v2_key', - 'v2_unknown_key', 'v1_base_image_in_v2', 'v1_builder_image_in_v2' + 'v2_unknown_key', 'v1_base_image_in_v2', 'v1_builder_image_in_v2', 'prepend_in_v3', + 'dest_has_dot_dot', 'dest_is_absolute', 'src_req', 'dest_req', 'ansible_cfg', + 'builder_in_v3', 'opt_skip_ans_chk', ]) def test_yaml_error(self, exec_env_definition_file, yaml_text, expect): path = exec_env_definition_file(yaml_text) @@ -88,7 +120,7 @@ def test_ee_validated_early(self, exec_env_definition_file): path = exec_env_definition_file("{'version': 1, 'bad_key': 1}") with pytest.raises(DefinitionError) as error: AnsibleBuilder(filename=path) - assert "Error: Unknown yaml key(s), {'bad_key'}, found in the definition file." 
in str(error.value.args[0]) + assert "Additional properties are not allowed ('bad_key' was unexpected)" in str(error.value.args[0]) def test_ee_missing_image_name(self, exec_env_definition_file): path = exec_env_definition_file("{'version': 2, 'images': { 'base_image': {'signature_original_name': ''}}}") @@ -96,6 +128,86 @@ def test_ee_missing_image_name(self, exec_env_definition_file): AnsibleBuilder(filename=path) assert "'name' is a required field for 'base_image'" in str(error.value.args[0]) + def test_v1_to_v2_key_upgrades(self, exec_env_definition_file): + """ Test that EE schema keys are upgraded from version V1 to V2. """ + path = exec_env_definition_file("{'version': 1, 'additional_build_steps': {'prepend': 'value1', 'append': 'value2'}}") + definition = UserDefinition(path) + definition.validate() + add_bld_steps = definition.raw['additional_build_steps'] + assert 'prepend' in add_bld_steps + assert 'append' in add_bld_steps + assert add_bld_steps['prepend'] == 'value1' + assert add_bld_steps['append'] == 'value2' + assert 'prepend_final' in add_bld_steps + assert 'append_final' in add_bld_steps + assert add_bld_steps['prepend_final'] == add_bld_steps['prepend'] + assert add_bld_steps['append_final'] == add_bld_steps['append'] + + def test_v2_images(self, exec_env_definition_file): + """ + Verify that image definition contents are assigned correctly and copied + to the build_arg_defaults equivalents. 
+ """ + path = exec_env_definition_file( + "{'version': 2, 'images': { 'base_image': {'name': 'base_image:latest'}, 'builder_image': {'name': 'builder_image:latest'} }}" + ) + definition = UserDefinition(path) + definition.validate() + + assert definition.base_image.name == "base_image:latest" + assert definition.builder_image.name == "builder_image:latest" + assert definition.build_arg_defaults['EE_BASE_IMAGE'] == "base_image:latest" + assert definition.build_arg_defaults['EE_BUILDER_IMAGE'] == "builder_image:latest" + + def test_v3_ansible_install_refs(self, exec_env_definition_file): + path = exec_env_definition_file( + "{'version': 3, 'dependencies': {'ansible_core': 'ansible-core==2.13', 'ansible_runner': 'ansible-runner==2.3.1'}}" + ) + definition = UserDefinition(path) + definition.validate() + assert definition.ansible_core_ref == "ansible-core==2.13" + assert definition.ansible_runner_ref == "ansible-runner==2.3.1" + assert definition.ansible_ref_install_list == "ansible-core==2.13 ansible-runner==2.3.1" + + def test_v3_inline_python(self, exec_env_definition_file): + """ + Test that inline values for dependencies.python work. + """ + path = exec_env_definition_file( + "{'version': 3, 'dependencies': {'python': ['req1', 'req2']}}" + ) + definition = UserDefinition(path) + definition.validate() + + python_req = definition.raw.get('dependencies', {}).get('python') + assert python_req == ['req1', 'req2'] + + def test_v3_inline_system(self, exec_env_definition_file): + """ + Test that inline values for dependencies.system work. 
+ """ + path = exec_env_definition_file( + "{'version': 3, 'dependencies': {'system': ['req1', 'req2']}}" + ) + definition = UserDefinition(path) + definition.validate() + + system_req = definition.raw.get('dependencies', {}).get('system') + assert system_req == ['req1', 'req2'] + + def test_v3_skip_ansible_check_default(self, exec_env_definition_file): + """ + Test that options.skip_ansible_check defaults to False + """ + path = exec_env_definition_file( + "{'version': 3}" + ) + definition = UserDefinition(path) + definition.validate() + + value = definition.raw.get('options', {}).get('skip_ansible_check') + assert value is False + class TestImageDescription: diff --git a/tox.ini b/tox.ini index 99532a15..4ec09686 100644 --- a/tox.ini +++ b/tox.ini @@ -9,15 +9,16 @@ deps = -r {toxinidir}/test/requirements.txt commands = pytest {posargs} -[testenv:linters] +[testenv:linters{,-py39,-py310}] description = Run code linters commands = flake8 --version flake8 ansible_builder test yamllint --version yamllint -s . + mypy ansible_builder -[testenv:unit{,-py38,-py39,-py310}] +[testenv:unit{,-py39,-py310}] description = Run unit tests commands = pytest {posargs:test/unit} @@ -25,9 +26,11 @@ commands = pytest {posargs:test/unit} # Some of these tests must run serially because of a shared resource # (the system policy.json file). description = Run pulp integration tests -commands = pytest -n 1 -m "serial" {posargs:test/pulp_integration} +commands = + pytest -n 1 -m "serial" {posargs:test/pulp_integration} + pytest -m "not serial" {posargs:test/pulp_integration} -[testenv:integration{,-py38,-py39,-py310}] +[testenv:integration{,-py39,-py310}] description = Run integration tests # rootless podman reads $HOME passenv =