diff --git a/.github/workflows/ci_standalone-iqe-rbac-tests.yml b/.github/workflows/ci_standalone-iqe-rbac-tests.yml index 7276e51464..f435ce2b07 100644 --- a/.github/workflows/ci_standalone-iqe-rbac-tests.yml +++ b/.github/workflows/ci_standalone-iqe-rbac-tests.yml @@ -58,4 +58,4 @@ jobs: # run: sudo apt install software-properties-common -y; sudo add-apt-repository --yes ppa:deadsnakes/ppa; sudo apt install python3.10 - name: run the integration tests - run: ./dev/standalone-iqe-rbac-tests/RUN_INTEGRATION.sh + run: ./dev/standalone-iqe-tests/RUN_RBAC_INTEGRATION.sh diff --git a/.github/workflows/ci_standalone-rbac-on-repos.yml b/.github/workflows/ci_standalone-rbac-on-repos.yml new file mode 100644 index 0000000000..b1c58fda89 --- /dev/null +++ b/.github/workflows/ci_standalone-rbac-on-repos.yml @@ -0,0 +1,58 @@ +--- +name: Standalone RBAC on Repositories tests +on: + pull_request: + branches: + - '**' + paths-ignore: + - 'docs/**' + - 'mkdocs.yml' + - 'CHANGES/**' + push: + branches: + - '**' + workflow_dispatch: + +jobs: + + integration: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - uses: actions/setup-python@v2 + with: + python-version: "3.10" + + - name: Update apt + run: sudo apt -y update + + - name: Install LDAP requirements + run: sudo apt-get install -y libsasl2-dev python3 libldap2-dev libssl-dev build-essential + + - name: Install docker-compose + run: pip3 install --upgrade docker-compose + + - name: create the .compose.env file + run: rm -f .compose.env; cp .compose.env.example .compose.env + + - name: workaround github worker permissions issues + run: sed -i.bak 's/PIP_EDITABLE_INSTALL=1/PIP_EDITABLE_INSTALL=0/' .compose.env + + - name: workaround github worker permissions issues + run: sed -i.bak 's/WITH_DEV_INSTALL=1/WITH_DEV_INSTALL=0/' .compose.env + + - name: build stack + run: make docker/all + + - name: start the compose stack + run: ./compose up -d + + - name: give stack some time to spin up + run: COMPOSE_INTERACTIVE_NO_CLI=1 python dev/common/poll.py + + - name: set keyring on staging repo for signature upload + run: ./compose exec -T api ./entrypoint.sh manage set-repo-keyring --repository staging --keyring /etc/pulp/certs/galaxy.kbx -y + + - name: run the integration tests + run: ./dev/standalone-iqe-tests/RUN_RBAC_REPOS_INTEGRATION.sh diff --git a/.github/workflows/ci_standalone-x-repo-search.yml b/.github/workflows/ci_standalone-x-repo-search.yml new file mode 100644 index 0000000000..24fb7ecc43 --- /dev/null +++ b/.github/workflows/ci_standalone-x-repo-search.yml @@ -0,0 +1,58 @@ +--- +name: Standalone Cross Repository Search tests +on: + pull_request: + branches: + - '**' + paths-ignore: + - 'docs/**' + - 'mkdocs.yml' + - 'CHANGES/**' + push: + branches: + - '**' + workflow_dispatch: + +jobs: + + integration: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - uses: actions/setup-python@v2 + with: + python-version: "3.10" + + - name: Update apt + run: sudo apt -y update + + - name: Install LDAP requirements + run: sudo apt-get install -y libsasl2-dev python3 libldap2-dev libssl-dev build-essential + + - name: Install docker-compose + run: pip3 install --upgrade docker-compose + + - name: create the .compose.env file + run: rm -f .compose.env; cp .compose.env.example .compose.env + + - name: workaround github worker permissions issues + run: sed -i.bak 's/PIP_EDITABLE_INSTALL=1/PIP_EDITABLE_INSTALL=0/' .compose.env + + - name: workaround github worker permissions issues + run: sed -i.bak 
's/WITH_DEV_INSTALL=1/WITH_DEV_INSTALL=0/' .compose.env + + - name: build stack + run: make docker/all + + - name: start the compose stack + run: ./compose up -d + + - name: give stack some time to spin up + run: COMPOSE_INTERACTIVE_NO_CLI=1 python dev/common/poll.py + + - name: set keyring on staging repo for signature upload + run: ./compose exec -T api ./entrypoint.sh manage set-repo-keyring --repository staging --keyring /etc/pulp/certs/galaxy.kbx -y + + - name: run the integration tests + run: ./dev/standalone-iqe-tests/RUN_X_REPO_SEARCH_INTEGRATION.sh diff --git a/CHANGES/2125.misc b/CHANGES/2125.misc new file mode 100644 index 0000000000..9692b60e44 --- /dev/null +++ b/CHANGES/2125.misc @@ -0,0 +1 @@ +Implement integration tests for Repository Management feature diff --git a/dev/common/RUN_INTEGRATION.sh b/dev/common/RUN_INTEGRATION.sh index d565f85e67..8f4146d767 100755 --- a/dev/common/RUN_INTEGRATION.sh +++ b/dev/common/RUN_INTEGRATION.sh @@ -39,10 +39,10 @@ $VENVPATH/bin/pip show epdb || pip install epdb # export HUB_LOCAL=1 # dev/common/RUN_INTEGRATION.sh --pdb -sv --log-cli-level=DEBUG "-m standalone_only" -k mytest if [[ -z $HUB_LOCAL ]]; then - $VENVPATH/bin/pytest --capture=no -m "not standalone_only and not community_only and not rbac_roles and not iqe_rbac_test and not sync" $@ -v galaxy_ng/tests/integration + $VENVPATH/bin/pytest --capture=no -m "not standalone_only and not community_only and not rbac_roles and not iqe_rbac_test and not sync and not rm_sync and not x_repo_search and not rbac_repos" $@ -v galaxy_ng/tests/integration RC=$? else - $VENVPATH/bin/pytest --capture=no -m "not cloud_only and not community_only and not rbac_roles and not iqe_rbac_test and not sync" -v $@ galaxy_ng/tests/integration + $VENVPATH/bin/pytest --capture=no -m "not cloud_only and not community_only and not rbac_roles and not iqe_rbac_test and not sync and not rm_sync and not x_repo_search and not rbac_repos" -v $@ galaxy_ng/tests/integration RC=$? if [[ $RC != 0 ]]; then diff --git a/dev/common/RUN_INTEGRATION_STAGE.sh b/dev/common/RUN_INTEGRATION_STAGE.sh index e16afe438a..f907934734 100755 --- a/dev/common/RUN_INTEGRATION_STAGE.sh +++ b/dev/common/RUN_INTEGRATION_STAGE.sh @@ -28,4 +28,4 @@ pip3 install --upgrade pip wheel pip3 install -r integration_requirements.txt -pytest --log-cli-level=DEBUG -m "not standalone_only and not community_only and not rbac_roles and not slow_in_cloud and not iqe_rbac_test and not sync" --junitxml=galaxy_ng-results.xml -v galaxy_ng/tests/integration +pytest --log-cli-level=DEBUG -m "not standalone_only and not community_only and not rbac_roles and not slow_in_cloud and not iqe_rbac_test and not sync and not rm_sync and not x_repo_search and not rbac_repos" --junitxml=galaxy_ng-results.xml -v galaxy_ng/tests/integration diff --git a/dev/ephemeral/run_tests.sh b/dev/ephemeral/run_tests.sh index b0b13caef6..2bf1b8a9ce 100755 --- a/dev/ephemeral/run_tests.sh +++ b/dev/ephemeral/run_tests.sh @@ -12,7 +12,7 @@ ${VENV_PATH}/bin/pip install -r integration_requirements.txt echo "Running pytest ..." ${VENV_PATH}/bin/pytest \ - --capture=no -m "cloud_only or (not standalone_only and not community_only and not sync)" \ + --capture=no -m "cloud_only or (not standalone_only and not community_only and not sync and not rm_sync and not x_repo_search and not rbac_repos)" \ -v \ galaxy_ng/tests/integration $@ RC=$? 
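The marker expressions above assume that rm_sync, x_repo_search and rbac_repos are registered pytest markers; galaxy_ng presumably declares them in its pytest configuration, which is not part of this diff. A minimal, purely illustrative conftest-style sketch of such a registration:

def pytest_configure(config):
    # Hypothetical registration of the new markers so that `-m` filtering works cleanly
    # and --strict-markers (if enabled) does not reject them; the real project may
    # declare these in pytest.ini or setup.cfg instead.
    for marker in ("rm_sync", "x_repo_search", "rbac_repos"):
        config.addinivalue_line(
            "markers", f"{marker}: repository management integration tests"
        )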
diff --git a/dev/insights/RUN_INTEGRATION.sh b/dev/insights/RUN_INTEGRATION.sh index 3dc75f86ce..dca8d5e5e9 100755 --- a/dev/insights/RUN_INTEGRATION.sh +++ b/dev/insights/RUN_INTEGRATION.sh @@ -31,7 +31,7 @@ pip show epdb || pip install epdb # export HUB_LOCAL=1 # dev/common/RUN_INTEGRATION.sh --pdb -sv --log-cli-level=DEBUG "-m standalone_only" -k mytest -pytest --capture=no --tb=short -m "not standalone_only and not community_only and not rbac_roles and not iqe_rbac_test and not sync" $@ -v galaxy_ng/tests/integration +pytest --capture=no --tb=short -m "not standalone_only and not community_only and not rbac_roles and not iqe_rbac_test and not sync and not rm_sync and not x_repo_search and not rbac_repos" $@ -v galaxy_ng/tests/integration RC=$? exit $RC diff --git a/dev/standalone-certified-sync/RUN_INTEGRATION.sh b/dev/standalone-certified-sync/RUN_INTEGRATION.sh index ed7bb3c281..72c96183af 100755 --- a/dev/standalone-certified-sync/RUN_INTEGRATION.sh +++ b/dev/standalone-certified-sync/RUN_INTEGRATION.sh @@ -25,7 +25,7 @@ $VENVPATH/bin/pip show epdb || pip install epdb # when running user can specify extra pytest arguments such as # export HUB_LOCAL=1 # dev/common/RUN_INTEGRATION.sh --pdb -sv --log-cli-level=DEBUG "-m standalone_only" -k mytest -$VENVPATH/bin/pytest --capture=no -m "certified_sync" -v $@ galaxy_ng/tests/integration +$VENVPATH/bin/pytest --capture=no -m "certified_sync or rm_sync" -v $@ galaxy_ng/tests/integration RC=$? if [[ $RC != 0 ]]; then diff --git a/dev/standalone-iqe-rbac-tests/Dockerfile b/dev/standalone-iqe-tests/Dockerfile similarity index 100% rename from dev/standalone-iqe-rbac-tests/Dockerfile rename to dev/standalone-iqe-tests/Dockerfile diff --git a/dev/standalone-iqe-rbac-tests/RUN_INTEGRATION.sh b/dev/standalone-iqe-tests/RUN_RBAC_INTEGRATION.sh similarity index 100% rename from dev/standalone-iqe-rbac-tests/RUN_INTEGRATION.sh rename to dev/standalone-iqe-tests/RUN_RBAC_INTEGRATION.sh diff --git a/dev/standalone-iqe-tests/RUN_RBAC_REPOS_INTEGRATION.sh b/dev/standalone-iqe-tests/RUN_RBAC_REPOS_INTEGRATION.sh new file mode 100755 index 0000000000..bd8bdea513 --- /dev/null +++ b/dev/standalone-iqe-tests/RUN_RBAC_REPOS_INTEGRATION.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +set -e + +unset NAMESPACE +unset HUB_AUTH_URL +export HUB_USE_MOVE_ENDPOINT="true" + + +VENVPATH=/tmp/gng_testing +PIP=${VENVPATH}/bin/pip + +if [[ ! -d $VENVPATH ]]; then + python3.10 -m venv $VENVPATH +fi +source $VENVPATH/bin/activate +echo "PYTHON: $(which python)" + +$VENVPATH/bin/pip install -r integration_requirements.txt +$VENVPATH/bin/pip show epdb || pip install epdb + +echo "Setting up test data" +docker exec -i galaxy_ng_api_1 /entrypoint.sh manage shell < dev/common/setup_test_data.py + +$VENVPATH/bin/pytest --capture=no -m "rbac_repos" -v $@ galaxy_ng/tests/integration +RC=$? + +if [[ $RC != 0 ]]; then + # dump the api logs + docker logs galaxy_ng_api_1 + + # dump the worker logs + docker logs galaxy_ng_worker_1 +fi + +exit $RC diff --git a/dev/standalone-iqe-tests/RUN_X_REPO_SEARCH_INTEGRATION.sh b/dev/standalone-iqe-tests/RUN_X_REPO_SEARCH_INTEGRATION.sh new file mode 100755 index 0000000000..fcc0e835c9 --- /dev/null +++ b/dev/standalone-iqe-tests/RUN_X_REPO_SEARCH_INTEGRATION.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +set -e + +unset NAMESPACE +unset HUB_AUTH_URL +export HUB_USE_MOVE_ENDPOINT="true" + + +VENVPATH=/tmp/gng_testing +PIP=${VENVPATH}/bin/pip + +if [[ ! 
-d $VENVPATH ]]; then + python3.10 -m venv $VENVPATH +fi +source $VENVPATH/bin/activate +echo "PYTHON: $(which python)" + +$VENVPATH/bin/pip install -r integration_requirements.txt +$VENVPATH/bin/pip show epdb || pip install epdb + +echo "Setting up test data" +docker exec -i galaxy_ng_api_1 /entrypoint.sh manage shell < dev/common/setup_test_data.py + +$VENVPATH/bin/pytest --capture=no -m "x_repo_search" -v $@ galaxy_ng/tests/integration +RC=$? + +if [[ $RC != 0 ]]; then + # dump the api logs + docker logs galaxy_ng_api_1 + + # dump the worker logs + docker logs galaxy_ng_worker_1 +fi + +exit $RC diff --git a/dev/standalone-iqe-rbac-tests/docker-compose-ui.yaml b/dev/standalone-iqe-tests/docker-compose-ui.yaml similarity index 100% rename from dev/standalone-iqe-rbac-tests/docker-compose-ui.yaml rename to dev/standalone-iqe-tests/docker-compose-ui.yaml diff --git a/dev/standalone-iqe-rbac-tests/docker-compose.yml b/dev/standalone-iqe-tests/docker-compose.yml similarity index 100% rename from dev/standalone-iqe-rbac-tests/docker-compose.yml rename to dev/standalone-iqe-tests/docker-compose.yml diff --git a/dev/standalone-iqe-rbac-tests/galaxy_ng.env b/dev/standalone-iqe-tests/galaxy_ng.env similarity index 100% rename from dev/standalone-iqe-rbac-tests/galaxy_ng.env rename to dev/standalone-iqe-tests/galaxy_ng.env diff --git a/dev/standalone-iqe-rbac-tests/integration-test-dockerfile b/dev/standalone-iqe-tests/integration-test-dockerfile similarity index 100% rename from dev/standalone-iqe-rbac-tests/integration-test-dockerfile rename to dev/standalone-iqe-tests/integration-test-dockerfile diff --git a/dev/standalone/RUN_INTEGRATION.sh b/dev/standalone/RUN_INTEGRATION.sh index 90994677c0..15ab01e8eb 100755 --- a/dev/standalone/RUN_INTEGRATION.sh +++ b/dev/standalone/RUN_INTEGRATION.sh @@ -32,7 +32,7 @@ export HUB_USE_MOVE_ENDPOINT=true # dev/common/RUN_INTEGRATION.sh --pdb -sv --log-cli-level=DEBUG "-m standalone_only" -k mytest pytest \ --capture=no \ - -m "not cloud_only and not community_only and not rbac_roles and not iqe_rbac_test and not sync and not certified_sync" \ + -m "not cloud_only and not community_only and not rbac_roles and not iqe_rbac_test and not sync and not certified_sync and not x_repo_search and not rm_sync and not rbac_repos" \ -v $@ galaxy_ng/tests/integration RC=$? diff --git a/galaxy_ng/tests/integration/api/test_custom_repo_sync.py b/galaxy_ng/tests/integration/api/test_custom_repo_sync.py new file mode 100644 index 0000000000..ab46303ec0 --- /dev/null +++ b/galaxy_ng/tests/integration/api/test_custom_repo_sync.py @@ -0,0 +1,181 @@ +import logging +import os + +import pytest + +from galaxykit.namespaces import create_namespace +from galaxykit.remotes import create_remote +from galaxykit.utils import wait_for_task +from ..utils.repo_management_utils import ( + create_repo_and_dist, + search_collection_endpoint, + create_test_namespace, + upload_new_artifact, + add_content_units, + verify_repo_data, +) +from ..utils.iqe_utils import GalaxyKitClient +from ..utils.tools import generate_random_string + +logger = logging.getLogger(__name__) + + +@pytest.mark.min_hub_version("4.7dev") +class TestCustomReposSync: + @pytest.mark.rm_sync + def test_basic_sync_custom_repo_with_req_file(self, galaxy_client): + """ + Test syncing directly from a custom repo. 
+ Only the collection specified in the requirements file is fetched + """ + # this is the insights mode instance (source hub) + url = os.getenv("TEST_CRC_API_ROOT", "http://localhost:8080/api/automation-hub/") + custom_config = {"url": url} + galaxy_kit_client = GalaxyKitClient(custom_config) + source_client = galaxy_kit_client.gen_authorized_client( + {"username": "notifications_admin", "password": "redhat"}, basic_token=True + ) + + # create repo, distribution, namespace and add a collection + test_repo_name_1 = f"repo-test-{generate_random_string()}" + pulp_href = create_repo_and_dist(source_client, test_repo_name_1) + namespace_name = create_test_namespace(source_client) + namespace_name_no_sync = create_test_namespace(source_client) + tags = ["application"] + artifact = upload_new_artifact( + source_client, namespace_name, test_repo_name_1, "1.0.1", tags=tags + ) + collection_resp = source_client.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(source_client, content_units, pulp_href) + + # this artifact is not going to be synced + artifact_no_sync = upload_new_artifact( + source_client, namespace_name_no_sync, test_repo_name_1, "1.0.1", tags=tags + ) + collection_resp_no_sync = source_client.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_no_sync.name}" + ) + content_units_no_sync = [collection_resp_no_sync["results"][0]["pulp_href"]] + add_content_units(source_client, content_units_no_sync, pulp_href) + + # this is the standalone mode instance (destination local hub) + # create repository, distribution and remote in the local hub + gc = galaxy_client("iqe_admin") + test_remote_name = f"remote-test-{generate_random_string()}" + params = { + "auth_url": "http://localhost:8080/auth/realms/redhat-external/protocol/openid" + "-connect/token", + "token": "abcdefghijklmnopqrstuvwxyz1234567893", + "requirements_file": f"---\ncollections:\n- {artifact.namespace}.{artifact.name}", + } + create_remote( + gc, test_remote_name, f"{url}content/{test_repo_name_1}/", params=params + ) + create_repo_and_dist(gc, test_repo_name_1, remote=test_remote_name) + + # start sync + sync_url = f"content/{test_repo_name_1}/v3/sync/" + r = gc.post(sync_url, body="") + wait_for_task(gc, r, task_id=r["task"]) + + # verify only the collection in the requirement file is synced + matches, results = search_collection_endpoint(gc, name=artifact.name, limit=100) + expected = [ + { + "repo_name": test_repo_name_1, + "cv_version": "1.0.1", + "is_highest": True, + "cv_name": artifact.name, + } + ] + assert verify_repo_data(expected, results) + + matches, _ = search_collection_endpoint(gc, name=artifact_no_sync.name, limit=100) + assert matches == 0 + + @pytest.mark.rm_sync + def test_basic_sync_custom_repo_mirror(self, galaxy_client): + """ + Test syncing directly from a custom repo, without a requirements file and checking + that if the content is not present in the remote repository, + it's removed from the local repo + """ + # this is the insights mode instance (source hub) + url = os.getenv("TEST_CRC_API_ROOT", "http://localhost:8080/api/automation-hub/") + custom_config = {"url": url} + galaxy_kit_client = GalaxyKitClient(custom_config) + source_client = galaxy_kit_client.gen_authorized_client( + {"username": "notifications_admin", "password": "redhat"}, basic_token=True + ) + # create repo, distribution, namespace and add a collection + test_repo_name_1 = 
f"repo-test-{generate_random_string()}" + pulp_href = create_repo_and_dist(source_client, test_repo_name_1) + namespace_name = create_test_namespace(source_client) + tags = ["application"] + artifact = upload_new_artifact( + source_client, namespace_name, test_repo_name_1, "1.0.1", tags=tags + ) + collection_resp = source_client.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(source_client, content_units, pulp_href) + + # this is the standalone mode instance (destination local hub) + # create repository, distribution, namespace and remote in the local hub + gc = galaxy_client("iqe_admin") + test_remote_name = f"remote-test-{generate_random_string()}" + params = { + "auth_url": "http://localhost:8080/auth/realms/redhat-external/protocol/openid" + "-connect/token", + "token": "abcdefghijklmnopqrstuvwxyz1234567893", + } + create_remote( + gc, test_remote_name, f"{url}content/{test_repo_name_1}/", params=params + ) + pulp_href = create_repo_and_dist(gc, test_repo_name_1, remote=test_remote_name) + + create_namespace(gc, namespace_name, "ns_group_for_tests") + # this artifact is not in the remote repository, + # so it should be gone after syncing (mirror) + artifact_will_be_gone = upload_new_artifact( + gc, namespace_name, test_repo_name_1, "1.0.1", tags=tags + ) + collection_resp = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_will_be_gone.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(gc, content_units, pulp_href) + + matches, _ = search_collection_endpoint( + gc, name=artifact_will_be_gone.name, limit=100 + ) + assert matches == 2 # +1 because it's in staging repo + + # start sync + sync_url = f"content/{test_repo_name_1}/v3/sync/" + r = gc.post(sync_url, body="") + wait_for_task(gc, r, task_id=r["task"]) + + # artifact has been synced + _, results = search_collection_endpoint(gc, name=artifact.name, limit=100) + expected = [ + { + "repo_name": test_repo_name_1, + "cv_version": "1.0.1", + "is_highest": True, + "cv_name": artifact.name, + } + ] + assert verify_repo_data(expected, results) + + # this artifact has been removed from the repo, now it's only in staging repo + matches, results = search_collection_endpoint( + gc, name=artifact_will_be_gone.name, limit=100 + ) + expected = [{"repo_name": "staging", "cv_name": artifact_will_be_gone.name}] + assert matches == 1 + assert verify_repo_data(expected, results) diff --git a/galaxy_ng/tests/integration/api/test_iqe_rbac.py b/galaxy_ng/tests/integration/api/test_iqe_rbac.py index 163839c0eb..8688d69646 100644 --- a/galaxy_ng/tests/integration/api/test_iqe_rbac.py +++ b/galaxy_ng/tests/integration/api/test_iqe_rbac.py @@ -469,7 +469,7 @@ def test_missing_role_delete_collection(self, galaxy_client): gc_user.delete_collection( namespace_name, artifact.name, artifact.version, repository="published" ) - assert ctx.value.args[0] == 403 + assert ctx.value.args[0]["status"] == "403" assert collection_exists(gc, namespace_name, artifact.name, artifact.version) @pytest.mark.iqe_rbac_test diff --git a/galaxy_ng/tests/integration/api/test_rbac_on_repos.py b/galaxy_ng/tests/integration/api/test_rbac_on_repos.py new file mode 100644 index 0000000000..aef3e8182e --- /dev/null +++ b/galaxy_ng/tests/integration/api/test_rbac_on_repos.py @@ -0,0 +1,966 @@ +import pytest +import logging + +from galaxy_ng.tests.integration.utils.repo_management_utils import ( + 
create_repo_and_dist, + search_collection_endpoint, + create_test_namespace, + upload_new_artifact, + add_content_units, + remove_content_units, +) +from galaxy_ng.tests.integration.utils.rbac_utils import add_new_user_to_new_group + +from galaxy_ng.tests.integration.utils.tools import generate_random_string +from galaxykit.remotes import ( + create_remote, + view_remotes, + update_remote, + delete_remote, + add_permissions_to_remote, +) +from galaxykit.repositories import ( + delete_repository, + create_repository, + patch_update_repository, + put_update_repository, + copy_content_between_repos, + move_content_between_repos, + add_permissions_to_repository, + delete_distribution, +) +from galaxykit.utils import GalaxyClientError + +logger = logging.getLogger(__name__) + + +@pytest.mark.min_hub_version("4.7dev") +class TestRBACRepos: + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_missing_role_create_repo(self, galaxy_client): + """ + Verifies that a user without permissions can't create repositories + """ + gc = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc) + permissions = [ + "galaxy.add_user", + "galaxy.view_user", + ] # nothing to do with creating repos :P + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc.create_role(role_name, "any_description", permissions) + gc.add_role_to_group(role_name, group["id"]) + gc = galaxy_client(user) + test_repo_name = f"repo-test-{generate_random_string()}" + with pytest.raises(GalaxyClientError) as ctx: + create_repository(gc, test_repo_name) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_create_repo(self, galaxy_client): + """ + Verifies that a user with permission can create repositories + """ + gc = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc) + permissions = ["ansible.add_ansiblerepository"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc.create_role(role_name, "any_description", permissions) + gc.add_role_to_group(role_name, group["id"]) + gc = galaxy_client(user) + test_repo_name = f"repo-test-{generate_random_string()}" + create_repo_and_dist(gc, test_repo_name) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_missing_role_delete_repo(self, galaxy_client): + """ + Verifies that a user without permissions can't delete repositories + """ + gc = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc) + test_repo_name = f"repo-test-{generate_random_string()}" + create_repository(gc, test_repo_name) # test repo to be deleted + permissions = [ + "ansible.add_ansiblerepository" + ] # nothing to do with deleting repos :P + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc.create_role(role_name, "any_description", permissions) + gc.add_role_to_group(role_name, group["id"]) + gc = galaxy_client(user) + with pytest.raises(GalaxyClientError) as ctx: + delete_repository(gc, test_repo_name) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_delete_repo(self, galaxy_client): + """ + Verifies that a user with permissions can delete repositories + """ + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + test_repo_name = f"repo-test-{generate_random_string()}" + create_repository(gc_admin, test_repo_name) # test repo to be deleted + permissions = ["ansible.delete_ansiblerepository"] + role_name = 
f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + gc_user = galaxy_client(user) + delete_repository(gc_user, test_repo_name) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_missing_role_upload_to_repo(self, galaxy_client): + """ + Verifies that a user without permissions can't upload to repositories + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["galaxy.upload_to_namespace"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + repo_pulp_href = create_repo_and_dist(gc_admin, test_repo_name) + namespace_name = create_test_namespace(gc_admin) + gc_user = galaxy_client(user) + artifact = upload_new_artifact( + gc_user, namespace_name, test_repo_name, "0.0.1" + ) # (needs upload_to_namespace) + collection_resp = gc_user.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + with pytest.raises(GalaxyClientError) as ctx: + add_content_units( + gc_user, content_units, repo_pulp_href + ) # (needs change_ansiblerepository) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_missing_role_remove_from_repo(self, galaxy_client): + """ + Verifies that a user without permissions can't remove cv from repositories + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["galaxy.upload_to_namespace"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + repo_pulp_href = create_repo_and_dist(gc_admin, test_repo_name) + namespace_name = create_test_namespace(gc_admin) + gc_user = galaxy_client(user) + artifact = upload_new_artifact( + gc_admin, namespace_name, test_repo_name, "0.0.1" + ) # (needs upload_to_namespace) + collection_resp = gc_admin.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units( + gc_admin, content_units, repo_pulp_href + ) # (needs change_ansiblerepository) + with pytest.raises(GalaxyClientError) as ctx: + remove_content_units( + gc_user, content_units, repo_pulp_href + ) # (needs change_ansiblerepository) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_upload_to_repo(self, galaxy_client): + """ + Verifies that a user with permissions can upload to repositories + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = [ + "galaxy.upload_to_namespace", + "ansible.modify_ansible_repo_content", + ] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + repo_pulp_href = create_repo_and_dist(gc_admin, test_repo_name) + namespace_name = create_test_namespace(gc_admin) + 
gc_user = galaxy_client(user) + artifact = upload_new_artifact( + gc_user, namespace_name, test_repo_name, "0.0.1" + ) # to staging (upload_to_namespace) + collection_resp = gc_user.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units( + gc_user, content_units, repo_pulp_href + ) # (modify_ansible_repo_content) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_remove_from_repo(self, galaxy_client): + """ + Verifies that a user with permissions can remove from repositories + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = [ + "galaxy.upload_to_namespace", + "ansible.modify_ansible_repo_content", + ] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + repo_pulp_href = create_repo_and_dist(gc_admin, test_repo_name) + namespace_name = create_test_namespace(gc_admin) + gc_user = galaxy_client(user) + artifact = upload_new_artifact( + gc_admin, namespace_name, test_repo_name, "0.0.1" + ) # to staging (upload_to_namespace) + collection_resp = gc_admin.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units( + gc_admin, content_units, repo_pulp_href + ) # (modify_ansible_repo_content) + remove_content_units( + gc_user, content_units, repo_pulp_href + ) # (needs change_ansiblerepository) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_patch_update_repo(self, galaxy_client): + """ + Verifies that a user with permissions can update repositories (patch) + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.change_ansiblerepository", "galaxy.upload_to_namespace"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + resp = create_repository(gc_admin, test_repo_name, description="old_description") + gc_user = galaxy_client(user) + updated_body = {"description": "updated description"} + patch_update_repository(gc_user, resp["pulp_href"].split("/")[-2], updated_body) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_missing_role_patch_update_repo(self, galaxy_client): + """ + Verifies that a user without permissions can't update repositories (patch) + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["galaxy.upload_to_namespace"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + resp = create_repository(gc_admin, test_repo_name, description="old_description") + gc_user = galaxy_client(user) + updated_body = {"description": "updated description"} + with pytest.raises(GalaxyClientError) as ctx: + patch_update_repository( + gc_user, resp["pulp_href"].split("/")[-2], updated_body + ) + assert ctx.value.response.status_code == 403 + + 
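# The RBAC tests in this class repeat the same setup: mint a throwaway role with a
# given permission list and bind it to the new user's group. A hypothetical helper
# (not part of this patch) that captures the pattern, built only from calls already
# used above:
def _grant_role_to_group(gc_admin, group, permissions):
    role_name = f"galaxy.rbac_test_role_{generate_random_string()}"
    gc_admin.create_role(role_name, "any_description", permissions)
    gc_admin.add_role_to_group(role_name, group["id"])
    return role_name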
@pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_put_update_repo(self, galaxy_client): + """ + Verifies that a user with permissions can update repositories (put) + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.change_ansiblerepository", "galaxy.upload_to_namespace"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + resp = create_repository(gc_admin, test_repo_name, description="old_description") + gc_user = galaxy_client(user) + updated_body = {"name": test_repo_name, "description": "updated description"} + put_update_repository(gc_user, resp["pulp_href"].split("/")[-2], updated_body) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_missing_role_put_update_repo(self, galaxy_client): + """ + Verifies that a user without permissions can't update repositories (put) + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["galaxy.upload_to_namespace"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + resp = create_repository(gc_admin, test_repo_name, description="old_description") + gc_user = galaxy_client(user) + updated_body = {"name": test_repo_name, "description": "updated description"} + with pytest.raises(GalaxyClientError) as ctx: + put_update_repository(gc_user, resp["pulp_href"].split("/")[-2], updated_body) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_any_user_can_use_x_repo_search_endpoint(self, galaxy_client): + """ + Verifies that any user can search in repositories + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.change_ansiblerepository", "galaxy.upload_to_namespace"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + gc_user = galaxy_client(user) + search_collection_endpoint(gc_user, repository_name=test_repo_name) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_copy_cv_endpoint(self, galaxy_client): + """ + Verifies a user with permissions can use the copy cv endpoint + """ + gc_admin = galaxy_client("iqe_admin") + + test_repo_name_1 = f"repo-test-{generate_random_string()}" + repo_pulp_href_1 = create_repo_and_dist(gc_admin, test_repo_name_1) + + namespace_name = create_test_namespace(gc_admin) + artifact = upload_new_artifact( + gc_admin, namespace_name, test_repo_name_1, "0.0.1" + ) + collection_resp = gc_admin.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(gc_admin, content_units, repo_pulp_href_1) + + test_repo_name_2 = f"repo-test-{generate_random_string()}" + repo_pulp_href_2 = create_repo_and_dist(gc_admin, test_repo_name_2) + + # new user + user, group = add_new_user_to_new_group(gc_admin) + permissions = 
["ansible.modify_ansible_repo_content"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + gc_user = galaxy_client(user) + + copy_content_between_repos( + gc_user, content_units, repo_pulp_href_1, [repo_pulp_href_2] + ) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_move_cv_endpoint(self, galaxy_client): + """ + Verifies a user with permissions can use the move cv endpoint + """ + gc_admin = galaxy_client("iqe_admin") + + test_repo_name_1 = f"repo-test-{generate_random_string()}" + repo_pulp_href_1 = create_repo_and_dist(gc_admin, test_repo_name_1) + + namespace_name = create_test_namespace(gc_admin) + artifact = upload_new_artifact( + gc_admin, namespace_name, test_repo_name_1, "0.0.1" + ) + collection_resp = gc_admin.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(gc_admin, content_units, repo_pulp_href_1) + + test_repo_name_2 = f"repo-test-{generate_random_string()}" + repo_pulp_href_2 = create_repo_and_dist(gc_admin, test_repo_name_2) + + # new user + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.modify_ansible_repo_content"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + gc_user = galaxy_client(user) + + move_content_between_repos( + gc_user, content_units, repo_pulp_href_1, [repo_pulp_href_2] + ) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_missing_role_copy_cv_endpoint(self, galaxy_client): + """ + Verifies a user without permissions can't use the copy cv endpoint + """ + gc_admin = galaxy_client("iqe_admin") + + test_repo_name_1 = f"repo-test-{generate_random_string()}" + repo_pulp_href_1 = create_repo_and_dist(gc_admin, test_repo_name_1) + + namespace_name = create_test_namespace(gc_admin) + artifact = upload_new_artifact( + gc_admin, namespace_name, test_repo_name_1, "0.0.1" + ) + collection_resp = gc_admin.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(gc_admin, content_units, repo_pulp_href_1) + + test_repo_name_2 = f"repo-test-{generate_random_string()}" + repo_pulp_href_2 = create_repo_and_dist(gc_admin, test_repo_name_2) + + # new user + user, group = add_new_user_to_new_group(gc_admin) + gc_user = galaxy_client(user) + with pytest.raises(GalaxyClientError) as ctx: + copy_content_between_repos( + gc_user, content_units, repo_pulp_href_1, [repo_pulp_href_2] + ) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_missing_role_move_cv_endpoint(self, galaxy_client): + """ + Verifies a user without permissions can't use the move cv endpoint + """ + gc_admin = galaxy_client("iqe_admin") + + test_repo_name_1 = f"repo-test-{generate_random_string()}" + repo_pulp_href_1 = create_repo_and_dist(gc_admin, test_repo_name_1) + + namespace_name = create_test_namespace(gc_admin) + artifact = upload_new_artifact( + gc_admin, namespace_name, test_repo_name_1, "0.0.1" + ) + collection_resp = gc_admin.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = 
[collection_resp["results"][0]["pulp_href"]] + add_content_units(gc_admin, content_units, repo_pulp_href_1) + + test_repo_name_2 = f"repo-test-{generate_random_string()}" + repo_pulp_href_2 = create_repo_and_dist(gc_admin, test_repo_name_2) + + # new user + user, group = add_new_user_to_new_group(gc_admin) + gc_user = galaxy_client(user) + with pytest.raises(GalaxyClientError) as ctx: + move_content_between_repos( + gc_user, content_units, repo_pulp_href_1, [repo_pulp_href_2] + ) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_add_remote_missing_role(self, galaxy_client): + """ + Verifies a user without permissions can't create remotes + """ + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + gc_user = galaxy_client(user) + + test_remote_name = f"remote-test-{generate_random_string()}" + with pytest.raises(GalaxyClientError) as ctx: + create_remote(gc_user, test_remote_name, gc_admin.galaxy_root) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_add_remote(self, galaxy_client): + """ + Verifies a user with permissions can create remotes + """ + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + gc_user = galaxy_client(user) + + permissions = ["ansible.add_collectionremote"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + + test_remote_name = f"remote-test-{generate_random_string()}" + create_remote(gc_user, test_remote_name, gc_admin.galaxy_root) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_view_remotes_missing_role(self, galaxy_client): + """ + Verifies a user without permissions can't view remotes + """ + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + gc_user = galaxy_client(user) + with pytest.raises(GalaxyClientError) as ctx: + view_remotes(gc_user) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_view_remote_role(self, galaxy_client): + """ + Verifies a user with permissions can view remotes + """ + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.view_collectionremote"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + gc_user = galaxy_client(user) + view_remotes(gc_user) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_update_remote_missing_role(self, galaxy_client): + """ + Verifies a user without permissions can't update remotes + """ + gc_admin = galaxy_client("iqe_admin") + test_remote_name = f"remote-test-{generate_random_string()}" + create_remote(gc_admin, test_remote_name, gc_admin.galaxy_root) + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.view_collectionremote"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + gc_user = galaxy_client(user) + with pytest.raises(GalaxyClientError) as ctx: + update_remote(gc_user, test_remote_name, "new_url", {}) + assert ctx.value.response.status_code == 403 + + 
@pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_update_remote(self, galaxy_client): + """ + Verifies a user with permissions can update remotes + """ + gc_admin = galaxy_client("iqe_admin") + test_remote_name = f"remote-test-{generate_random_string()}" + create_remote(gc_admin, test_remote_name, gc_admin.galaxy_root) + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.view_collectionremote", "ansible.change_collectionremote"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + gc_user = galaxy_client(user) + update_remote(gc_user, test_remote_name, "http://new_url/", {}) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_delete_remote(self, galaxy_client): + """ + Verifies a user with permissions can delete remotes + """ + gc_admin = galaxy_client("iqe_admin") + test_remote_name = f"remote-test-{generate_random_string()}" + create_remote(gc_admin, test_remote_name, gc_admin.galaxy_root) + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.view_collectionremote", "ansible.delete_collectionremote"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + gc_user = galaxy_client(user) + delete_remote(gc_user, test_remote_name) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_missing_role_delete_remote(self, galaxy_client): + """ + Verifies a user without permissions can't delete remotes + """ + gc_admin = galaxy_client("iqe_admin") + test_remote_name = f"remote-test-{generate_random_string()}" + create_remote(gc_admin, test_remote_name, gc_admin.galaxy_root) + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.view_collectionremote"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + gc_user = galaxy_client(user) + with pytest.raises(GalaxyClientError) as ctx: + delete_remote(gc_user, test_remote_name) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_missing_role_manage_roles_remotes(self, galaxy_client): + """ + Verifies a user without permissions can't add permissions to remotes + """ + gc_admin = galaxy_client("iqe_admin") + test_remote_name = f"remote-test-{generate_random_string()}" + create_remote(gc_admin, test_remote_name, gc_admin.galaxy_root) + user, group = add_new_user_to_new_group(gc_admin) + + gc_user = galaxy_client(user) + with pytest.raises(GalaxyClientError) as ctx: + add_permissions_to_remote(gc_user, test_remote_name, "role_name", []) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_manage_roles_remotes(self, galaxy_client): + """ + Verifies a user with permissions can add permissions to remotes + """ + gc_admin = galaxy_client("iqe_admin") + test_remote_name = f"remote-test-{generate_random_string()}" + create_remote(gc_admin, test_remote_name, gc_admin.galaxy_root) + user, group = add_new_user_to_new_group(gc_admin) + + permissions = [ + "ansible.view_collectionremote", + "ansible.manage_roles_collectionremote", + ] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + 
gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + + gc_user = galaxy_client(user) + add_permissions_to_remote( + gc_user, test_remote_name, "galaxy.collection_remote_owner", [group["name"]] + ) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_upload_to_repo_object_role(self, galaxy_client): + """ + Verifies that a user with permissions can upload to repositories (object permission) + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.modify_ansible_repo_content"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + repo_pulp_href = create_repo_and_dist(gc_admin, test_repo_name) + namespace_name = create_test_namespace(gc_admin) + add_permissions_to_repository( + gc_admin, test_repo_name, role_name, [group["name"]] + ) + artifact = upload_new_artifact(gc_admin, namespace_name, test_repo_name, "0.0.1") + gc_user = galaxy_client(user) + collection_resp = gc_user.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units( + gc_user, content_units, repo_pulp_href + ) # (modify_ansible_repo_content) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_missing_role_upload_to_repo_object_role(self, galaxy_client): + """ + Verifies that a user without permissions can't upload to repositories (object permission) + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.delete_ansiblerepository"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + repo_pulp_href = create_repo_and_dist(gc_admin, test_repo_name) + namespace_name = create_test_namespace(gc_admin) + add_permissions_to_repository( + gc_admin, test_repo_name, role_name, [group["name"]] + ) + artifact = upload_new_artifact(gc_admin, namespace_name, test_repo_name, "0.0.1") + gc_user = galaxy_client(user) + collection_resp = gc_user.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + with pytest.raises(GalaxyClientError) as ctx: + add_content_units( + gc_user, content_units, repo_pulp_href + ) # (modify_ansible_repo_content) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_update_repo_object_role(self, galaxy_client): + """ + Verifies that a user with permissions can update a repository (object permission) + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.change_ansiblerepository"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + repo_pulp_href = create_repo_and_dist(gc_admin, test_repo_name) + add_permissions_to_repository( + gc_admin, test_repo_name, role_name, [group["name"]] + ) + gc_user = galaxy_client(user) + updated_body = {"name": test_repo_name, "description": "updated description"} + put_update_repository(gc_user, 
repo_pulp_href.split("/")[-2], updated_body) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_missing_role_update_repo_object_role(self, galaxy_client): + """ + Verifies that a user without permissions can't update a repository (object permission) + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.delete_ansiblerepository"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + repo_pulp_href = create_repo_and_dist(gc_admin, test_repo_name) + add_permissions_to_repository( + gc_admin, test_repo_name, role_name, [group["name"]] + ) + gc_user = galaxy_client(user) + updated_body = {"name": test_repo_name, "description": "updated description"} + with pytest.raises(GalaxyClientError) as ctx: + put_update_repository(gc_user, repo_pulp_href.split("/")[-2], updated_body) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_delete_repo_object_role(self, galaxy_client): + """ + Verifies that a user with permissions can delete a repository (object permission) + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.delete_ansiblerepository"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + create_repo_and_dist(gc_admin, test_repo_name) + add_permissions_to_repository( + gc_admin, test_repo_name, role_name, [group["name"]] + ) + gc_user = galaxy_client(user) + delete_repository(gc_user, test_repo_name) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_missing_role_delete_repo_object_role(self, galaxy_client): + """ + Verifies that a user without permissions can't delete a repository (object permission) + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.change_ansiblerepository"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + create_repo_and_dist(gc_admin, test_repo_name) + add_permissions_to_repository( + gc_admin, test_repo_name, role_name, [group["name"]] + ) + gc_user = galaxy_client(user) + with pytest.raises(GalaxyClientError) as ctx: + delete_repository(gc_user, test_repo_name) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_add_permissions_to_repo_object_role(self, galaxy_client): + """ + Verifies that a user with permissions can + add permissions to repositories (object permission) + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.manage_roles_ansiblerepository"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + create_repo_and_dist(gc_admin, test_repo_name) + add_permissions_to_repository( + gc_admin, test_repo_name, role_name, [group["name"]] + ) + gc_user = galaxy_client(user) + add_permissions_to_repository(gc_user, test_repo_name, role_name, ["admin_staff"]) + + 
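# The "object role" tests above differ from the earlier "global role" tests only in how
# the role is attached; both idioms appear verbatim in this patch:
#
#   # global: bind the role to the group, applying it to all repositories
#   gc_admin.add_role_to_group(role_name, group["id"])
#
#   # object-scoped: bind the role to the group for one specific repository
#   add_permissions_to_repository(gc_admin, test_repo_name, role_name, [group["name"]])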
@pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_missing_role_add_permissions_to_repo_object_role(self, galaxy_client): + """ + Verifies that a user without permissions + can't add permissions to repositories (object permission) + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.delete_ansiblerepository"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + create_repo_and_dist(gc_admin, test_repo_name) + add_permissions_to_repository( + gc_admin, test_repo_name, role_name, [group["name"]] + ) + gc_user = galaxy_client(user) + with pytest.raises(GalaxyClientError) as ctx: + add_permissions_to_repository( + gc_user, test_repo_name, role_name, ["admin_staff"] + ) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_add_permissions_to_repo_object_role_global_role(self, galaxy_client): + """ + Verifies that a user with permissions + can add permissions to repositories (global permission) + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.manage_roles_ansiblerepository"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + create_repo_and_dist(gc_admin, test_repo_name) + gc_user = galaxy_client(user) + add_permissions_to_repository(gc_user, test_repo_name, role_name, ["admin_staff"]) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_missing_role_add_permissions_to_repo_object_role_global_role( + self, galaxy_client + ): + """ + Verifies that a user without permissions + can't add permissions to repositories (global permission) + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.delete_ansiblerepository"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + gc_admin.add_role_to_group(role_name, group["id"]) + create_repo_and_dist(gc_admin, test_repo_name) + gc_user = galaxy_client(user) + with pytest.raises(GalaxyClientError) as ctx: + add_permissions_to_repository( + gc_user, test_repo_name, role_name, ["admin_staff"] + ) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + @pytest.mark.parametrize( + "protected_repo", + ["validated", "rh-certified", "community", "published", "rejected", "staging"], + ) + def test_admin_protected_repos_cant_be_deleted(self, galaxy_client, protected_repo): + """ + Verifies that protected repos can't be deleted + """ + gc_admin = galaxy_client("iqe_admin") + with pytest.raises(GalaxyClientError) as ctx: + delete_repository(gc_admin, protected_repo) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + @pytest.mark.parametrize( + "protected_dist", + ["validated", "rh-certified", "community", "published", "rejected", "staging"], + ) + def test_admin_protected_distributions_cant_be_deleted( + self, galaxy_client, protected_dist + ): + """ + Verifies that protected distributions 
can't be deleted + """ + gc_admin = galaxy_client("iqe_admin") + with pytest.raises(GalaxyClientError) as ctx: + delete_distribution(gc_admin, protected_dist) + assert ctx.value.response.status_code == 403 + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_role_remove_from_repo_object_role(self, galaxy_client): + """ + Verifies that a user with permissions can remove from repositories (object permission) + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = [ + "galaxy.upload_to_namespace", + "ansible.modify_ansible_repo_content", + ] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + repo_pulp_href = create_repo_and_dist(gc_admin, test_repo_name) + add_permissions_to_repository( + gc_admin, test_repo_name, role_name, [group["name"]] + ) + namespace_name = create_test_namespace(gc_admin) + gc_user = galaxy_client(user) + artifact = upload_new_artifact( + gc_admin, namespace_name, test_repo_name, "0.0.1" + ) # to staging (upload_to_namespace) + collection_resp = gc_admin.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units( + gc_admin, content_units, repo_pulp_href + ) # (modify_ansible_repo_content) + remove_content_units( + gc_user, content_units, repo_pulp_href + ) # (needs change_ansiblerepository) + + @pytest.mark.rbac_repos + @pytest.mark.standalone_only + def test_missing_role_remove_from_repo_object_role(self, galaxy_client): + """ + Verifies that a user without permissions can't remove cv from repositories (object role) + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc_admin = galaxy_client("iqe_admin") + user, group = add_new_user_to_new_group(gc_admin) + permissions = ["ansible.delete_ansiblerepository"] + role_name = f"galaxy.rbac_test_role_{generate_random_string()}" + gc_admin.create_role(role_name, "any_description", permissions) + repo_pulp_href = create_repo_and_dist(gc_admin, test_repo_name) + add_permissions_to_repository( + gc_admin, test_repo_name, role_name, [group["name"]] + ) + namespace_name = create_test_namespace(gc_admin) + gc_user = galaxy_client(user) + artifact = upload_new_artifact( + gc_admin, namespace_name, test_repo_name, "0.0.1" + ) # (needs upload_to_namespace) + collection_resp = gc_admin.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units( + gc_admin, content_units, repo_pulp_href + ) # (needs change_ansiblerepository) + with pytest.raises(GalaxyClientError) as ctx: + remove_content_units( + gc_user, content_units, repo_pulp_href + ) # (needs change_ansiblerepository) + assert ctx.value.response.status_code == 403 diff --git a/galaxy_ng/tests/integration/api/test_repositories.py b/galaxy_ng/tests/integration/api/test_repositories.py new file mode 100644 index 0000000000..c7d18170c1 --- /dev/null +++ b/galaxy_ng/tests/integration/api/test_repositories.py @@ -0,0 +1,172 @@ +import pytest +import logging + +from galaxy_ng.tests.integration.utils.rbac_utils import upload_test_artifact + +from galaxy_ng.tests.integration.utils.repo_management_utils import ( + create_repo_and_dist, + create_test_namespace, + upload_new_artifact, + add_content_units, + search_collection_endpoint, + 
verify_repo_data, +) +from galaxy_ng.tests.integration.utils.tools import generate_random_string +from galaxykit.collections import sign_collection +from galaxykit.repositories import copy_content_between_repos, move_content_between_repos +from galaxykit.utils import GalaxyClientError + +logger = logging.getLogger(__name__) + + +@pytest.mark.min_hub_version("4.7dev") +class TestRepositories: + @pytest.mark.repositories + def test_cant_upload_same_collection_same_repo(self, galaxy_client): + """ + Verifies that the same collection / version cannot be uploaded to the same repo + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc = galaxy_client("admin") + create_repo_and_dist(gc, test_repo_name) + namespace_name = create_test_namespace(gc) + artifact = upload_new_artifact( + gc, namespace_name, test_repo_name, "1.0.1", tags=["application"] + ) + with pytest.raises(GalaxyClientError) as ctx: + upload_test_artifact(gc, namespace_name, test_repo_name, artifact) + assert ctx.value.response.status_code == 400 + + @pytest.mark.repositories + def test_copy_cv_endpoint(self, galaxy_client): + """ + Verifies a cv can be copied to a different repo + """ + gc_admin = galaxy_client("admin") + + test_repo_name_1 = f"repo-test-{generate_random_string()}" + repo_pulp_href_1 = create_repo_and_dist(gc_admin, test_repo_name_1) + + namespace_name = create_test_namespace(gc_admin) + artifact = upload_new_artifact( + gc_admin, namespace_name, test_repo_name_1, "1.0.1", tags=["application"] + ) + collection_resp = gc_admin.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(gc_admin, content_units, repo_pulp_href_1) + + test_repo_name_2 = f"repo-test-{generate_random_string()}" + repo_pulp_href_2 = create_repo_and_dist(gc_admin, test_repo_name_2) + + copy_content_between_repos(gc_admin, content_units, repo_pulp_href_1, [repo_pulp_href_2]) + # verify cv is in both + matches, results = search_collection_endpoint(gc_admin, name=artifact.name) + expected = [ + {"cv_name": artifact.name, "repo_name": test_repo_name_1, "is_signed": False}, + {"cv_name": artifact.name, "repo_name": test_repo_name_2, "is_signed": False}, + ] + assert verify_repo_data(expected, results) + + @pytest.mark.repositories + def test_move_cv_endpoint(self, galaxy_client): + """ + Verifies a cv can be moved to a different repo + """ + gc_admin = galaxy_client("admin") + + test_repo_name_1 = f"repo-test-{generate_random_string()}" + repo_pulp_href_1 = create_repo_and_dist(gc_admin, test_repo_name_1) + + namespace_name = create_test_namespace(gc_admin) + artifact = upload_new_artifact( + gc_admin, namespace_name, test_repo_name_1, "1.0.1", tags=["application"] + ) + collection_resp = gc_admin.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(gc_admin, content_units, repo_pulp_href_1) + + test_repo_name_2 = f"repo-test-{generate_random_string()}" + repo_pulp_href_2 = create_repo_and_dist(gc_admin, test_repo_name_2) + + move_content_between_repos(gc_admin, content_units, repo_pulp_href_1, [repo_pulp_href_2]) + # verify cv is only in destination repo + _, results = search_collection_endpoint(gc_admin, name=artifact.name) + expected = [{"cv_name": artifact.name, "repo_name": test_repo_name_2, "is_signed": False}] + assert verify_repo_data(expected, results) + matches, _ = search_collection_endpoint( + 
gc_admin, name=artifact.name, repository_name=test_repo_name_1 + ) + assert matches == 0 + + @pytest.mark.repositories + @pytest.mark.standalone_only + def test_copy_signed_cv_endpoint(self, galaxy_client): + """ + Verifies a signed cv can be copied to a different repo + """ + gc_admin = galaxy_client("admin") + + test_repo_name_1 = f"repo-test-{generate_random_string()}" + repo_pulp_href_1 = create_repo_and_dist(gc_admin, test_repo_name_1) + + namespace_name = create_test_namespace(gc_admin) + artifact = upload_new_artifact( + gc_admin, namespace_name, test_repo_name_1, "1.0.1", tags=["application"] + ) + collection_resp = gc_admin.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(gc_admin, content_units, repo_pulp_href_1) + + test_repo_name_2 = f"repo-test-{generate_random_string()}" + repo_pulp_href_2 = create_repo_and_dist(gc_admin, test_repo_name_2) + + sign_collection(gc_admin, content_units[0], repo_pulp_href_1) + + copy_content_between_repos(gc_admin, content_units, repo_pulp_href_1, [repo_pulp_href_2]) + matches, results = search_collection_endpoint(gc_admin, name=artifact.name) + expected = [ + {"cv_name": artifact.name, "repo_name": test_repo_name_1, "is_signed": True}, + {"cv_name": artifact.name, "repo_name": test_repo_name_2, "is_signed": True}, + ] + assert verify_repo_data(expected, results) + + @pytest.mark.repositories + @pytest.mark.standalone_only + def test_move_signed_cv_endpoint(self, galaxy_client): + """ + Verifies a signed cv can be moved to a different repo + """ + gc_admin = galaxy_client("admin") + + test_repo_name_1 = f"repo-test-{generate_random_string()}" + repo_pulp_href_1 = create_repo_and_dist(gc_admin, test_repo_name_1) + + namespace_name = create_test_namespace(gc_admin) + artifact = upload_new_artifact( + gc_admin, namespace_name, test_repo_name_1, "1.0.1", tags=["application"] + ) + collection_resp = gc_admin.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(gc_admin, content_units, repo_pulp_href_1) + + test_repo_name_2 = f"repo-test-{generate_random_string()}" + repo_pulp_href_2 = create_repo_and_dist(gc_admin, test_repo_name_2) + + sign_collection(gc_admin, content_units[0], repo_pulp_href_1) + + move_content_between_repos(gc_admin, content_units, repo_pulp_href_1, [repo_pulp_href_2]) + _, results = search_collection_endpoint(gc_admin, name=artifact.name) + expected = [{"cv_name": artifact.name, "repo_name": test_repo_name_2, "is_signed": True}] + assert verify_repo_data(expected, results) + matches, _ = search_collection_endpoint( + gc_admin, name=artifact.name, repository_name=test_repo_name_1 + ) + assert matches == 0 diff --git a/galaxy_ng/tests/integration/api/test_repository_labels.py b/galaxy_ng/tests/integration/api/test_repository_labels.py index 8670a085f3..a5ad12d002 100644 --- a/galaxy_ng/tests/integration/api/test_repository_labels.py +++ b/galaxy_ng/tests/integration/api/test_repository_labels.py @@ -26,5 +26,13 @@ def test_repository_labels(ansible_config): resp["name"] for resp in iterate_all( client, url.format(label)) } + # now we have test cases that create multiple repos, we don't want + # to take them into account in this test case + repos_to_remove = [] + for repo in repos: + if repo.startswith("repo-test-"): + repos_to_remove.append(repo) + for repo in repos_to_remove: + repos.remove(repo) assert 
repos == labels[label] diff --git a/galaxy_ng/tests/integration/api/test_ui_paths.py b/galaxy_ng/tests/integration/api/test_ui_paths.py index e9d02a0849..442492efe5 100644 --- a/galaxy_ng/tests/integration/api/test_ui_paths.py +++ b/galaxy_ng/tests/integration/api/test_ui_paths.py @@ -163,7 +163,7 @@ def test_api_ui_v1_collection_versions_version_range(ansible_config, uncertified def test_api_ui_v1_distributions(ansible_config): cfg = ansible_config('basic_user') with UIClient(config=cfg) as uclient: - resp = uclient.get('_ui/v1/distributions/') + resp = uclient.get('_ui/v1/distributions/?limit=100') assert resp.status_code == 200 ds = resp.json() diff --git a/galaxy_ng/tests/integration/api/test_x_repo_search.py b/galaxy_ng/tests/integration/api/test_x_repo_search.py new file mode 100644 index 0000000000..93ceae5da4 --- /dev/null +++ b/galaxy_ng/tests/integration/api/test_x_repo_search.py @@ -0,0 +1,986 @@ +import pytest +import logging + +from galaxy_ng.tests.integration.utils.rbac_utils import add_new_user_to_new_group + +from galaxy_ng.tests.integration.utils.repo_management_utils import ( + repo_exists, + create_repo_and_dist, + search_collection_endpoint, + create_test_namespace, + upload_new_artifact, + add_content_units, + verify_repo_data, +) +from galaxy_ng.tests.integration.utils.tools import ( + generate_random_artifact_version, + generate_random_string, +) +from galaxykit.collections import delete_collection, deprecate_collection, sign_collection +from galaxykit.namespaces import create_namespace +from galaxykit.repositories import get_all_repositories, delete_repository, get_distribution_id + +logger = logging.getLogger(__name__) + + +@pytest.mark.min_hub_version("4.7dev") +class TestXRepoSearch: + @pytest.mark.x_repo_search + def test_search_same_collection_diff_versions_same_repo(self, galaxy_client): + """ + Verifies that one collection with diff versions in the same repo + is found and the is_highest flag is correct + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + + repo_pulp_href = create_repo_and_dist(gc, test_repo_name) + namespace_name = create_test_namespace(gc) + key = generate_random_string() + artifact = upload_new_artifact( + gc, namespace_name, test_repo_name, "1.0.1", key, tags=["application"] + ) + upload_new_artifact(gc, namespace_name, test_repo_name, "1.0.2", key, tags=["application"]) + + collection_resp = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [ + collection_resp["results"][0]["pulp_href"], + collection_resp["results"][1]["pulp_href"], + ] + add_content_units(gc, content_units, repo_pulp_href) + matches, result = search_collection_endpoint( + gc, repository_name=test_repo_name, name=artifact.name + ) + assert matches == 2 + expected = [ + {"repo_name": test_repo_name, "cv_version": "1.0.2", "is_highest": True}, + {"is_highest": False, "cv_version": "1.0.1"}, + ] + assert verify_repo_data(expected, result) + + @pytest.mark.x_repo_search + def test_search_same_collection_diff_versions_diff_repo(self, galaxy_client): + """ + Verifies that one collection with diff versions in diff repos is found + """ + test_repo_name_1 = f"repo-test-{generate_random_string()}" + test_repo_name_2 = f"repo-test-{generate_random_string()}" + + gc = galaxy_client("iqe_admin") + repo_pulp_href_1 = create_repo_and_dist(gc, test_repo_name_1) + repo_pulp_href_2 = create_repo_and_dist(gc, test_repo_name_2) + + namespace_name = create_test_namespace(gc) + key = 
generate_random_string() + artifact_1 = upload_new_artifact( + gc, namespace_name, test_repo_name_1, "1.0.1", key, tags=["application"] + ) + upload_new_artifact( + gc, namespace_name, test_repo_name_2, "1.0.2", key, tags=["application"] + ) + + collection_resp = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_1.name}" + ) + + content_units_1 = [collection_resp["results"][1]["pulp_href"]] + content_units_2 = [collection_resp["results"][0]["pulp_href"]] + + add_content_units(gc, content_units_1, repo_pulp_href_1) + add_content_units(gc, content_units_2, repo_pulp_href_2) + + _, results = search_collection_endpoint(gc, name=artifact_1.name) + expected = [ + {"repo_name": test_repo_name_1, "cv_name": artifact_1.name, "cv_version": "1.0.1"}, + {"repo_name": test_repo_name_2, "cv_name": artifact_1.name, "cv_version": "1.0.2"}, + ] + assert verify_repo_data(expected, results) + + @pytest.mark.x_repo_search + def test_search_same_collection_diff_repo_same_versions(self, galaxy_client): + """ + Verifies that one collection with the same version in diff repos is found + """ + test_repo_name_1 = f"repo-test-{generate_random_string()}" + test_repo_name_2 = f"repo-test-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href_1 = create_repo_and_dist(gc, test_repo_name_1) + repo_pulp_href_2 = create_repo_and_dist(gc, test_repo_name_2) + namespace_name = create_test_namespace(gc) + artifact = upload_new_artifact( + gc, namespace_name, test_repo_name_1, "1.0.1", tags=["application"] + ) + collection_resp = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(gc, content_units, repo_pulp_href_1) + add_content_units(gc, content_units, repo_pulp_href_2) + _, results = search_collection_endpoint(gc, name=artifact.name) + expected = [ + {"repo_name": test_repo_name_1, "cv_name": artifact.name, "cv_version": "1.0.1"}, + {"repo_name": test_repo_name_2, "cv_name": artifact.name, "cv_version": "1.0.1"}, + ] + assert verify_repo_data(expected, results) + + @pytest.mark.x_repo_search + def test_search_upload_diff_collection_diff_namespaces(self, galaxy_client): + """ + Verifies that two collections in different namespaces in the same repo are found + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href = create_repo_and_dist(gc, test_repo_name) + namespace_name = f"ns_{generate_random_string()}" + namespace_name_mod = namespace_name + "_mod" + create_namespace(gc, namespace_name, "ns_group_for_tests") + create_namespace(gc, namespace_name + "_mod", "ns_group_for_tests") + + artifact_1 = upload_new_artifact( + gc, namespace_name, test_repo_name, "1.0.1", tags=["application"] + ) + artifact_2 = upload_new_artifact( + gc, namespace_name_mod, test_repo_name, "1.0.2", tags=["application"] + ) + + collection_resp_1 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_1.name}" + ) + collection_resp_2 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_2.name}" + ) + + content_units = [ + collection_resp_1["results"][0]["pulp_href"], + collection_resp_2["results"][0]["pulp_href"], + ] + add_content_units(gc, content_units, repo_pulp_href) + _, results = search_collection_endpoint(gc, repository_name=test_repo_name) + expected = [ + {"repo_name": test_repo_name, "cv_name": artifact_1.name, "cv_version": "1.0.1"}, + {"repo_name": test_repo_name, 
"cv_name": artifact_2.name, "cv_version": "1.0.2"}, + ] + assert verify_repo_data(expected, results) + + @pytest.mark.x_repo_search + def test_search_upload_same_collection_diff_repo_diff_versions_check_both_is_highest( + self, galaxy_client + ): + """ + Verifies that same collection name with two different versions + in two different repos is_highest is True for both versions + """ + test_repo_name_1 = f"repo-test-{generate_random_string()}" + test_repo_name_2 = f"repo-test-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href_1 = create_repo_and_dist(gc, test_repo_name_1) + repo_pulp_href_2 = create_repo_and_dist(gc, test_repo_name_2) + namespace_name = create_test_namespace(gc) + + key = generate_random_string() + artifact_1 = upload_new_artifact( + gc, namespace_name, test_repo_name_1, "1.0.1", key, tags=["application"] + ) + upload_new_artifact( + gc, namespace_name, test_repo_name_2, "1.0.2", key, tags=["application"] + ) + + collection_resp = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_1.name}" + ) + content_units_1 = [collection_resp["results"][0]["pulp_href"]] + content_units_2 = [collection_resp["results"][1]["pulp_href"]] + add_content_units(gc, content_units_1, repo_pulp_href_1) + add_content_units(gc, content_units_2, repo_pulp_href_2) + + _, results = search_collection_endpoint(gc, name=artifact_1.name) + + expected = [ + {"repo_name": test_repo_name_1, "cv_name": artifact_1.name, "is_highest": True}, + {"repo_name": test_repo_name_2, "cv_name": artifact_1.name, "is_highest": True}, + ] + assert verify_repo_data(expected, results) + + @pytest.mark.x_repo_search + def test_search_is_highest_changes_after_deletion(self, galaxy_client): + """ + Verifies that lower version becomes is_highest True when higher version is deleted + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href = create_repo_and_dist(gc, test_repo_name) + namespace_name = create_test_namespace(gc) + + key = generate_random_string() + artifact = upload_new_artifact( + gc, namespace_name, test_repo_name, "1.0.1", key, tags=["application"] + ) + upload_new_artifact(gc, namespace_name, test_repo_name, "1.0.2", key, tags=["application"]) + collection_resp = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + + content_units_1 = [collection_resp["results"][0]["pulp_href"]] + content_units_2 = [collection_resp["results"][1]["pulp_href"]] + add_content_units(gc, content_units_1, repo_pulp_href) + add_content_units(gc, content_units_2, repo_pulp_href) + + _, results = search_collection_endpoint(gc, name=artifact.name) + expected = [ + {"cv_version": "1.0.2", "is_highest": True}, + {"cv_version": "1.0.1", "is_highest": False}, + ] + assert verify_repo_data(expected, results) + delete_collection( + gc, namespace_name, artifact.name, version="1.0.2", repository=test_repo_name + ) + _, results = search_collection_endpoint(gc, name=artifact.name) + expected = [{"cv_version": "1.0.1", "is_highest": True}] + assert verify_repo_data(expected, results) + + @pytest.mark.x_repo_search + def test_search_deprecated_collection(self, galaxy_client): + """ + Verifies is_deprecated flag + """ + test_repo_name = f"repo-test-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href = create_repo_and_dist(gc, test_repo_name) + namespace_name = create_test_namespace(gc) + artifact = upload_new_artifact( + gc, namespace_name, test_repo_name, "1.0.1", 
tags=["application"] + ) + collection_resp = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(gc, content_units, repo_pulp_href) + deprecate_collection(gc, namespace_name, artifact.name, repository=test_repo_name) + matches, results = search_collection_endpoint( + gc, repository_name=test_repo_name, name=artifact.name + ) + expected = [{"repo_name": test_repo_name, "is_deprecated": True}] + assert verify_repo_data(expected, results) + assert matches == 1 + + @pytest.mark.x_repo_search + def test_search_cv_that_does_not_exist(self, galaxy_client): + """ + Verifies that search endpoint returns no results when a non-existing cv is searched + """ + gc = galaxy_client("iqe_admin") + matches, _ = search_collection_endpoint( + gc, name=f"does-not-exist-{generate_random_string()}" + ) + assert matches == 0 + + @pytest.mark.x_repo_search + def test_search_by_repository_name_or_operator(self, galaxy_client): + """ + Verifies that search endpoint can take several repository_name params (OR) + """ + test_repo_name_1 = f"repo-test-1-{generate_random_string()}" + test_repo_name_2 = f"repo-test-2-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href_1 = create_repo_and_dist(gc, test_repo_name_1) + repo_pulp_href_2 = create_repo_and_dist(gc, test_repo_name_2) + + namespace_name = create_test_namespace(gc) + artifact_1 = upload_new_artifact( + gc, namespace_name, test_repo_name_1, "1.0.1", tags=["application"] + ) + artifact_2 = upload_new_artifact( + gc, namespace_name, test_repo_name_1, "1.0.2", tags=["application"] + ) + + collection_resp_1 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_1.name}" + ) + collection_resp_2 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_2.name}" + ) + + content_units_1 = [collection_resp_1["results"][0]["pulp_href"]] + content_units_2 = [collection_resp_2["results"][0]["pulp_href"]] + + add_content_units(gc, content_units_1, repo_pulp_href_1) + add_content_units(gc, content_units_2, repo_pulp_href_2) + + matches, results = search_collection_endpoint(gc, repository_name=test_repo_name_1) + expected = [{"repo_name": test_repo_name_1, "cv_name": artifact_1.name, "is_highest": True}] + assert verify_repo_data(expected, results) + assert matches == 1 + + matches, results = search_collection_endpoint( + gc, + repository_name=[ + test_repo_name_1, + test_repo_name_2, + f"does-not-exist-{generate_random_string()}", + ], + ) + expected = [ + {"repo_name": test_repo_name_1, "cv_name": artifact_1.name, "is_highest": True}, + {"repo_name": test_repo_name_2, "cv_name": artifact_2.name, "is_highest": True}, + ] + assert verify_repo_data(expected, results) + assert matches == 2 + + @pytest.mark.x_repo_search + def test_search_by_repository_id(self, galaxy_client): + """ + Verifies that search endpoint accepts repository id as search param + """ + test_repo_name = f"repo-test-1-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href = create_repo_and_dist(gc, test_repo_name) + namespace_name = create_test_namespace(gc) + artifact = upload_new_artifact( + gc, namespace_name, test_repo_name, "1.0.1", tags=["application"] + ) + collection_resp = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(gc, content_units, repo_pulp_href) + repository_id = 
repo_pulp_href.split("/")[-2] + matches, results = search_collection_endpoint(gc, repository=repository_id) + expected = [{"repo_name": test_repo_name, "cv_name": artifact.name, "is_highest": True}] + assert verify_repo_data(expected, results) + assert matches == 1 + + @pytest.mark.x_repo_search + def test_search_by_namespace(self, galaxy_client): + """ + Verifies that search endpoint can search by namespace + """ + test_repo_name = f"repo-test-1-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href = create_repo_and_dist(gc, test_repo_name) + namespace_name = create_test_namespace(gc) + artifact = upload_new_artifact( + gc, namespace_name, test_repo_name, "1.0.1", tags=["application"] + ) + collection_resp = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(gc, content_units, repo_pulp_href) + matches, results = search_collection_endpoint(gc, namespace=namespace_name) + expected = [{"repo_name": test_repo_name, "cv_name": artifact.name, "is_highest": True}] + assert verify_repo_data(expected, results) + assert matches == 2 # staging + + @pytest.mark.x_repo_search + def test_search_by_version(self, galaxy_client): + """ + Verifies that search endpoint can search by version + """ + test_repo_name = f"repo-test-1-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href = create_repo_and_dist(gc, test_repo_name) + namespace_name = create_test_namespace(gc) + version = generate_random_artifact_version() + artifact = upload_new_artifact( + gc, namespace_name, test_repo_name, version, tags=["application"] + ) + collection_resp = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(gc, content_units, repo_pulp_href) + matches, results = search_collection_endpoint(gc, version=version) + expected = [{"repo_name": test_repo_name, "cv_name": artifact.name, "cv_version": version}] + assert verify_repo_data(expected, results) + + @pytest.mark.parametrize("is_highest,cv_version", [(True, "4.0.2"), (False, "4.0.1")]) + @pytest.mark.x_repo_search + def test_search_is_highest_true_false(self, galaxy_client, is_highest, cv_version): + """ + Verifies that search endpoint can search by is_highest parameter + """ + test_repo_name = f"repo-test-1-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href = create_repo_and_dist(gc, test_repo_name) + namespace_name = create_test_namespace(gc) + key = generate_random_string() + + artifact_1 = upload_new_artifact( + gc, namespace_name, test_repo_name, "4.0.1", key, tags=["application"] + ) + upload_new_artifact(gc, namespace_name, test_repo_name, "4.0.2", key, tags=["application"]) + + collection_resp = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_1.name}" + ) + content_units = [ + collection_resp["results"][0]["pulp_href"], + collection_resp["results"][1]["pulp_href"], + ] + add_content_units(gc, content_units, repo_pulp_href) + matches, results = search_collection_endpoint( + gc, repository_name=test_repo_name, is_highest=is_highest + ) + expected = [ + {"repo_name": test_repo_name, "cv_name": artifact_1.name, "cv_version": cv_version} + ] + assert verify_repo_data(expected, results) + assert matches == 1 + + @pytest.mark.parametrize("is_deprecated", [True, False]) + @pytest.mark.x_repo_search + def 
test_search_by_is_deprecated_true_false(self, galaxy_client, is_deprecated): + """ + Verifies that search endpoint can search by is_deprecated parameter + """ + test_repo_name = f"repo-test-1-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href = create_repo_and_dist(gc, test_repo_name) + namespace_name = create_test_namespace(gc) + + artifact_1 = upload_new_artifact( + gc, namespace_name, test_repo_name, "1.0.1", tags=["application"] + ) + artifact_2 = upload_new_artifact( + gc, namespace_name, test_repo_name, "4.0.1", tags=["application"] + ) + + collection_resp_1 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_1.name}" + ) + collection_resp_2 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_2.name}" + ) + + content_units = [ + collection_resp_1["results"][0]["pulp_href"], + collection_resp_2["results"][0]["pulp_href"], + ] + add_content_units(gc, content_units, repo_pulp_href) + + deprecate_collection(gc, namespace_name, artifact_1.name, repository=test_repo_name) + + cv_name = artifact_1.name if is_deprecated else artifact_2.name + + matches, results = search_collection_endpoint( + gc, is_deprecated=is_deprecated, repository_name=test_repo_name + ) + expected = [{"cv_name": cv_name, "is_deprecated": is_deprecated}] + assert verify_repo_data(expected, results) + assert matches == 1 + + @pytest.mark.x_repo_search + def test_search_by_tags(self, galaxy_client): + """ + Verifies that search endpoint can search by tags + """ + test_repo_name = f"repo-test-1-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href = create_repo_and_dist(gc, test_repo_name) + namespace_name = create_test_namespace(gc) + + tag = f"tag{generate_random_string()}" + tags = ["application", "test_tag_2", tag] + + artifact_1 = upload_new_artifact(gc, namespace_name, test_repo_name, "1.0.1", tags=tags) + artifact_2 = upload_new_artifact( + gc, namespace_name, test_repo_name, "4.0.2", tags=["application"] + ) + + collection_resp_1 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_1.name}" + ) + collection_resp_2 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_2.name}" + ) + + content_units = [ + collection_resp_1["results"][0]["pulp_href"], + collection_resp_2["results"][0]["pulp_href"], + ] + add_content_units(gc, content_units, repo_pulp_href) + + matches, results = search_collection_endpoint( + gc, tags="application,test_tag_2", repository_name=test_repo_name + ) + expected = [{"cv_name": artifact_1.name}] + assert verify_repo_data(expected, results) + assert matches == 1 + matches, results = search_collection_endpoint( + gc, tags="application,test_tag_3", repository_name=test_repo_name + ) + assert matches == 0 + + @pytest.mark.x_repo_search + def test_search_by_q(self, galaxy_client): + """ + Verifies that search endpoint can search by q + """ + test_repo_name = f"repo-test-1-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href = create_repo_and_dist(gc, test_repo_name) + namespace_name = create_test_namespace(gc) + key = generate_random_string() + + artifact_1 = upload_new_artifact( + gc, namespace_name, test_repo_name, "1.0.1", key, tags=["application"] + ) + artifact_2 = upload_new_artifact( + gc, namespace_name, test_repo_name, "4.0.2", tags=["application"] + ) + + collection_resp_1 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_1.name}" + ) + collection_resp_2 = gc.get( + 
f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_2.name}" + ) + + content_units = [ + collection_resp_1["results"][0]["pulp_href"], + collection_resp_2["results"][0]["pulp_href"], + ] + add_content_units(gc, content_units, repo_pulp_href) + matches, results = search_collection_endpoint(gc, q=key, repository_name=test_repo_name) + expected = [{"cv_name": artifact_1.name}] + assert verify_repo_data(expected, results) + assert matches == 1 + matches, results = search_collection_endpoint( + gc, q=f"does-not-exist-{generate_random_string()}" + ) + assert matches == 0 + + @pytest.mark.parametrize("is_signed", [True, False]) + @pytest.mark.x_repo_search + def test_search_by_is_signed_true_false(self, galaxy_client, is_signed): + """ + Verifies that search endpoint can search by is_signed + """ + test_repo_name = f"repo-test-1-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href = create_repo_and_dist(gc, test_repo_name) + namespace_name = create_test_namespace(gc) + artifact_1 = upload_new_artifact( + gc, namespace_name, test_repo_name, "1.0.1", tags=["application"] + ) + artifact_2 = upload_new_artifact( + gc, namespace_name, test_repo_name, "4.0.2", tags=["application"] + ) + + collection_resp_1 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_1.name}" + ) + collection_resp_2 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_2.name}" + ) + + content_units = [ + collection_resp_1["results"][0]["pulp_href"], + collection_resp_2["results"][0]["pulp_href"], + ] + add_content_units(gc, content_units, repo_pulp_href) + + sign_collection(gc, collection_resp_1["results"][0]["pulp_href"], repo_pulp_href) + cv_name = artifact_1.name if is_signed else artifact_2.name + matches, results = search_collection_endpoint( + gc, is_signed=is_signed, repository_name=test_repo_name + ) + expected = [{"cv_name": cv_name}] + assert verify_repo_data(expected, results) + assert matches == 1 + + @pytest.mark.x_repo_search + def test_search_by_distribution_id(self, galaxy_client): + """ + Verifies that search endpoint can search by distribution_id + """ + test_repo_name = f"repo-test-1-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href = create_repo_and_dist(gc, test_repo_name) + namespace_name = create_test_namespace(gc) + artifact = upload_new_artifact( + gc, namespace_name, test_repo_name, "1.0.0", tags=["application"] + ) + collection_resp = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(gc, content_units, repo_pulp_href) + distribution_id = get_distribution_id(gc, test_repo_name) + matches, results = search_collection_endpoint(gc, distribution=distribution_id) + expected = [{"cv_name": artifact.name, "repo_name": test_repo_name}] + assert verify_repo_data(expected, results) + assert matches == 1 + + @pytest.mark.x_repo_search + def test_search_by_base_path(self, galaxy_client): + """ + Verifies that search endpoint can search by base_path + """ + test_repo_name = f"repo-test-1-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href = create_repo_and_dist(gc, test_repo_name) + namespace_name = create_test_namespace(gc) + artifact = upload_new_artifact( + gc, namespace_name, test_repo_name, "1.0.0", tags=["application"] + ) + collection_resp = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = 
[collection_resp["results"][0]["pulp_href"]] + add_content_units(gc, content_units, repo_pulp_href) + matches, results = search_collection_endpoint(gc, distribution_base_path=test_repo_name) + expected = [{"cv_name": artifact.name, "repo_name": test_repo_name}] + assert verify_repo_data(expected, results) + assert matches == 1 + + @pytest.mark.x_repo_search + def test_search_by_dependency(self, galaxy_client): + """ + Verifies that search endpoint can search by dependency + """ + test_repo_name = f"repo-test-1-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href = create_repo_and_dist(gc, test_repo_name) + namespace_name = create_test_namespace(gc) + dep_name = f"names.dep{generate_random_string()}" + artifact = upload_new_artifact( + gc, + namespace_name, + test_repo_name, + "1.0.0", + dependencies={dep_name: "1.0.0"}, + tags=["application"], + ) + collection_resp = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(gc, content_units, repo_pulp_href) + matches, results = search_collection_endpoint(gc, dependency=dep_name) + expected = [{"cv_name": artifact.name, "repo_name": test_repo_name}] + assert verify_repo_data(expected, results) + assert matches == 2 # staging (+1) + + @pytest.mark.x_repo_search + def test_search_version_range(self, galaxy_client): + """ + Verifies that search endpoint can search by version range + """ + + test_repo_name = f"repo-test-1-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href = create_repo_and_dist(gc, test_repo_name) + namespace_name = create_test_namespace(gc) + artifact_1 = upload_new_artifact( + gc, namespace_name, test_repo_name, "6.6.6", tags=["application"] + ) + collection_resp_1 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_1.name}" + ) + + artifact_2 = upload_new_artifact( + gc, namespace_name, test_repo_name, "8.8.8", tags=["application"] + ) + collection_resp_2 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_2.name}" + ) + + artifact_3 = upload_new_artifact( + gc, namespace_name, test_repo_name, "12.6.6", tags=["application"] + ) + collection_resp_3 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_3.name}" + ) + content_units = [ + collection_resp_1["results"][0]["pulp_href"], + collection_resp_2["results"][0]["pulp_href"], + collection_resp_3["results"][0]["pulp_href"], + ] + add_content_units(gc, content_units, repo_pulp_href) + + matches, results = search_collection_endpoint( + gc, version_range=">=6.6.6,<8.8.8", repository_name=test_repo_name + ) + expected = [{"cv_name": artifact_1.name, "repo_name": test_repo_name}] + assert verify_repo_data(expected, results) + assert matches == 1 + + matches, results = search_collection_endpoint( + gc, version_range=">=6.6.6", repository_name=test_repo_name + ) + expected = [ + {"cv_name": artifact_2.name, "repo_name": test_repo_name}, + {"cv_name": artifact_1.name, "repo_name": test_repo_name}, + {"cv_name": artifact_3.name, "repo_name": test_repo_name}, + ] + assert verify_repo_data(expected, results) + assert matches == 3 + + matches, results = search_collection_endpoint( + gc, version_range="<=8.8.8", repository_name=test_repo_name + ) + expected = [ + {"cv_name": artifact_2.name, "repo_name": test_repo_name}, + {"cv_name": artifact_1.name, "repo_name": test_repo_name}, + ] + assert verify_repo_data(expected, results) + assert matches == 
2
+
+    @pytest.mark.x_repo_search
+    def test_private_repo(self, galaxy_client):
+        """
+        Verifies that a basic user can't view private repos
+        """
+        test_repo_name = f"repo-test-{generate_random_string()}"
+        gc = galaxy_client("iqe_admin")
+
+        repo_pulp_href = create_repo_and_dist(gc, test_repo_name, private=True)
+        namespace_name = create_test_namespace(gc)
+        key = generate_random_string()
+        artifact = upload_new_artifact(
+            gc, namespace_name, test_repo_name, "1.0.1", key, tags=["application"]
+        )
+        upload_new_artifact(gc, namespace_name, test_repo_name, "1.0.2", key, tags=["application"])
+
+        collection_resp = gc.get(
+            f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}"
+        )
+        content_units = [
+            collection_resp["results"][0]["pulp_href"],
+            collection_resp["results"][1]["pulp_href"],
+        ]
+        add_content_units(gc, content_units, repo_pulp_href)
+
+        user, group = add_new_user_to_new_group(gc)
+        gc_user = galaxy_client(user)
+        # without ansible.view_ansiblerepository the private repo is hidden from search
+        matches, result = search_collection_endpoint(
+            gc_user, repository_name=test_repo_name, name=artifact.name
+        )
+        assert matches == 0
+
+    @pytest.mark.x_repo_search
+    def test_any_user_can_see_non_private_repos(self, galaxy_client):
+        """
+        Verifies that a user without permissions can view repos that are not private
+        """
+        test_repo_name = f"repo-test-{generate_random_string()}"
+        gc = galaxy_client("iqe_admin")
+
+        repo_pulp_href = create_repo_and_dist(gc, test_repo_name, private=False)
+        namespace_name = create_test_namespace(gc)
+        key = generate_random_string()
+        artifact = upload_new_artifact(
+            gc, namespace_name, test_repo_name, "1.0.1", key, tags=["application"]
+        )
+        upload_new_artifact(gc, namespace_name, test_repo_name, "1.0.2", key, tags=["application"])
+
+        collection_resp = gc.get(
+            f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}"
+        )
+        content_units = [
+            collection_resp["results"][0]["pulp_href"],
+            collection_resp["results"][1]["pulp_href"],
+        ]
+        add_content_units(gc, content_units, repo_pulp_href)
+
+        user, group = add_new_user_to_new_group(gc)
+        gc_user = galaxy_client(user)
+        # no special permission is needed to search a repo that is not private
+        matches, result = search_collection_endpoint(
+            gc_user, repository_name=test_repo_name, name=artifact.name
+        )
+        assert matches == 2
+
+    @pytest.mark.x_repo_search
+    def test_private_repo_with_perm(self, galaxy_client):
+        """
+        Verifies that a user with view permissions can view private repos
+        """
+        test_repo_name = f"repo-test-{generate_random_string()}"
+        gc = galaxy_client("iqe_admin")
+
+        repo_pulp_href = create_repo_and_dist(gc, test_repo_name, private=True)
+        namespace_name = create_test_namespace(gc)
+        key = generate_random_string()
+        artifact = upload_new_artifact(
+            gc, namespace_name, test_repo_name, "1.0.1", key, tags=["application"]
+        )
+        upload_new_artifact(gc, namespace_name, test_repo_name, "1.0.2", key, tags=["application"])
+
+        collection_resp = gc.get(
+            f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}"
+        )
+        content_units = [
+            collection_resp["results"][0]["pulp_href"],
+            collection_resp["results"][1]["pulp_href"],
+        ]
+        add_content_units(gc, content_units, repo_pulp_href)
+
+        user, group = add_new_user_to_new_group(gc)
+        permissions = ["ansible.view_ansiblerepository"]
+        role_name = f"galaxy.rbac_test_role_{generate_random_string()}"
+        gc.create_role(role_name, "any_description", permissions)
+        gc.add_role_to_group(role_name, group["id"])
+
+        gc_user = galaxy_client(user)
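+        # both uploaded versions (1.0.1 and 1.0.2) should be returned by the search below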
+ # ansible.view_ansiblerepository views private repos too + matches, result = search_collection_endpoint( + gc_user, repository_name=test_repo_name, name=artifact.name + ) + assert matches == 2 + + @pytest.mark.x_repo_search + def test_search_non_existing_repo(self, galaxy_client): + """ + Verifies that there are no results when the repository does not exist + """ + test_repo_name_1 = f"repo-test-1-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href_1 = create_repo_and_dist(gc, test_repo_name_1) + namespace_name = create_test_namespace(gc) + artifact_1v1 = upload_new_artifact( + gc, namespace_name, test_repo_name_1, "1.0.1", tags=["application"] + ) + collection_resp_1 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_1v1.name}" + ) + content_units_1 = [collection_resp_1["results"][0]["pulp_href"]] + add_content_units(gc, content_units_1, repo_pulp_href_1) + + matches, _ = search_collection_endpoint( + gc, repository_name=f"does-not-exist-{generate_random_string()}" + ) + assert matches == 0 + + matches, _ = search_collection_endpoint( + gc, repository_name=f"does-not-exist-{generate_random_string()}", name=artifact_1v1.name + ) + assert matches == 0 + + @pytest.mark.x_repo_search + def test_search_collection_in_wrong_repo(self, galaxy_client): + """ + Verifies that the search returns no matches when a collection is searched in the wrong repo + """ + test_repo_name_1 = f"repo-test-1-{generate_random_string()}" + test_repo_name_2 = f"repo-test-2-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href_1 = create_repo_and_dist(gc, test_repo_name_1) + create_repo_and_dist(gc, test_repo_name_2) + namespace_name = create_test_namespace(gc) + artifact_1v1 = upload_new_artifact( + gc, namespace_name, test_repo_name_1, "1.0.1", tags=["application"] + ) + collection_resp_1 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_1v1.name}" + ) + content_units_1 = [collection_resp_1["results"][0]["pulp_href"]] + add_content_units(gc, content_units_1, repo_pulp_href_1) + matches, _ = search_collection_endpoint( + gc, repository_name=test_repo_name_2, name=artifact_1v1.name + ) + assert matches == 0 + + @pytest.mark.x_repo_search + def test_search_after_deletion(self, galaxy_client): + """ + Verifies that the search returns no matches when a collection has been deleted + """ + test_repo_name_2 = f"repo-test-2-{generate_random_string()}" + test_repo_name_3 = f"repo-test-3-{generate_random_string()}" + + gc = galaxy_client("iqe_admin") + repo_pulp_href_2 = create_repo_and_dist(gc, test_repo_name_2) + repo_pulp_href_3 = create_repo_and_dist(gc, test_repo_name_3) + namespace_name = create_test_namespace(gc) + artifact_3v1 = upload_new_artifact( + gc, namespace_name, test_repo_name_2, "1.0.1", tags=["application"] + ) + + collection_resp_3 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_3v1.name}" + ) + content_units_3 = [collection_resp_3["results"][0]["pulp_href"]] + + add_content_units(gc, content_units_3, repo_pulp_href_2) + add_content_units(gc, content_units_3, repo_pulp_href_3) + + delete_collection( + gc, namespace_name, artifact_3v1.name, version="1.0.1", repository=test_repo_name_3 + ) + matches, results = search_collection_endpoint(gc, name=artifact_3v1.name) + assert matches == 0 + + @pytest.mark.x_repo_search + def test_search_after_delete_repo_with_contents(self, galaxy_client): + """ + Verifies a non-empty repo can be deleted and search returns 0 + """ + 
test_repo_name = f"repo-test-{generate_random_string()}" + gc = galaxy_client("iqe_admin") + repo_pulp_href = create_repo_and_dist(gc, test_repo_name) + namespace_name = create_test_namespace(gc) + artifact = upload_new_artifact( + gc, namespace_name, test_repo_name, "1.0.1", tags=["application"] + ) + collection_resp = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact.name}" + ) + content_units = [collection_resp["results"][0]["pulp_href"]] + add_content_units(gc, content_units, repo_pulp_href) + delete_repository(gc, test_repo_name) + repos = get_all_repositories(gc) + assert not repo_exists(test_repo_name, repos) + matches, results = search_collection_endpoint( + gc, name=artifact.name, repository_name=test_repo_name + ) + assert matches == 0 + + @pytest.mark.x_repo_search + def test_is_highest_per_repo(self, galaxy_client): + """ + Verifies is_highest is per repo + """ + test_repo_name_1 = f"repo-test-1-{generate_random_string()}" + test_repo_name_2 = f"repo-test-2-{generate_random_string()}" + + gc = galaxy_client("iqe_admin") + repo_pulp_href_1 = create_repo_and_dist(gc, test_repo_name_1) + repo_pulp_href_2 = create_repo_and_dist(gc, test_repo_name_2) + namespace_name = create_test_namespace(gc) + key_2 = generate_random_string() + upload_new_artifact( + gc, namespace_name, test_repo_name_1, "1.0.2", key_2, tags=["application"] + ) + artifact_2v1 = upload_new_artifact( + gc, namespace_name, test_repo_name_2, "1.0.1", key_2, tags=["application"] + ) + collection_resp_2 = gc.get( + f"pulp/api/v3/content/ansible/collection_versions/?name={artifact_2v1.name}" + ) + content_units_1 = [collection_resp_2["results"][1]["pulp_href"]] + content_units_2 = [collection_resp_2["results"][0]["pulp_href"]] + + add_content_units(gc, content_units_1, repo_pulp_href_1) + add_content_units(gc, content_units_2, repo_pulp_href_2) + + _, results = search_collection_endpoint(gc, name=artifact_2v1.name) + expected = [ + { + "repo_name": test_repo_name_1, + "cv_name": artifact_2v1.name, + "cv_version": "1.0.2", + "is_highest": True, + }, + { + "repo_name": test_repo_name_2, + "cv_name": artifact_2v1.name, + "cv_version": "1.0.1", + "is_highest": True, + }, + ] + assert verify_repo_data(expected, results) diff --git a/galaxy_ng/tests/integration/conftest.py b/galaxy_ng/tests/integration/conftest.py index 364e6bd86f..9b4f38b55d 100644 --- a/galaxy_ng/tests/integration/conftest.py +++ b/galaxy_ng/tests/integration/conftest.py @@ -64,6 +64,10 @@ certified_sync: sync tests container against container auto_approve: run tests that require AUTO_APPROVE to be set to true private_repos: run tests verifying private repositories +rbac_repos: tests verifying rbac roles on custom repositories +rm_sync: tests verifying syncing custom repositories +x_repo_search: tests verifying cross-repo search endpoint +repositories: tests verifying custom repositories """ diff --git a/galaxy_ng/tests/integration/utils/iqe_utils.py b/galaxy_ng/tests/integration/utils/iqe_utils.py index e22955bc53..1a2c184716 100644 --- a/galaxy_ng/tests/integration/utils/iqe_utils.py +++ b/galaxy_ng/tests/integration/utils/iqe_utils.py @@ -30,7 +30,12 @@ class KeycloakPassword(KeycloakToken): """ def __init__( - self, access_token=None, auth_url=None, validate_certs=False, username=None, password=None + self, + access_token=None, + auth_url=None, + validate_certs=False, + username=None, + password=None, ): self.username = username self.password = password @@ -88,8 +93,8 @@ def max_hub_version(ansible_config, spec): class 
GalaxyKitClient: - def __init__(self, ansible_config): - self.config = ansible_config + def __init__(self, ansible_config, custom_config=None): + self.config = ansible_config if not custom_config else custom_config def gen_authorized_client( self, @@ -99,9 +104,13 @@ def gen_authorized_client( *, ignore_cache=False, token=None, - remote=False + remote=False, + basic_token=False, ): - config = self.config() + try: + config = self.config() + except TypeError: + config = self.config # role can be either be the name of a user (like `ansible_insights`) # or a dict containing a username and password: # {"username": "autohubtest2", "password": "p@ssword!"} @@ -113,8 +122,9 @@ def gen_authorized_client( if cache_key not in client_cache or ignore_cache: if is_sync_testing(): url = config.get("remote_hub") if remote else config.get("local_hub") - profile_config = self.config("remote_admin") \ - if remote else self.config("local_admin") + profile_config = ( + self.config("remote_admin") if remote else self.config("local_admin") + ) user = profile_config.get_profile_data() if profile_config.get("auth_url"): token = profile_config.get("token") @@ -127,7 +137,8 @@ def gen_authorized_client( "username": user["username"], "password": user["password"], "auth_url": profile_config.get("remote_auth_url") - if remote else profile_config.get("local_auth_url"), + if remote + else profile_config.get("local_auth_url"), "token": token, } else: @@ -150,14 +161,18 @@ def gen_authorized_client( } else: token = get_standalone_token( - role, url, ignore_cache=ignore_cache, ssl_verify=ssl_verify + role, + url, + ignore_cache=ignore_cache, + ssl_verify=ssl_verify, + basic_token=basic_token, ) # ignore_cache=True role.update(token=token) auth = role container_engine = config.get("container_engine") container_registry = config.get("container_registry") - + token_type = None if not basic_token else "Basic" g_client = GalaxyClient( url, auth=auth, @@ -165,6 +180,7 @@ def gen_authorized_client( container_registry=container_registry, container_tls_verify=ssl_verify, https_verify=ssl_verify, + token_type=token_type, ) if ignore_cache: return g_client @@ -176,7 +192,9 @@ def gen_authorized_client( token_cache = {} -def get_standalone_token(user, server, *, ignore_cache=False, ssl_verify=True): +def get_standalone_token( + user, server, *, ignore_cache=False, ssl_verify=True, basic_token=False +): cache_key = f"{server}::{user['username']}" if cache_key not in token_cache or ignore_cache: @@ -194,7 +212,7 @@ def get_standalone_token(user, server, *, ignore_cache=False, ssl_verify=True): token_cache[cache_key] = GalaxyToken(token_value).config["token"] else: token = BasicAuthToken(username, password) - if is_ephemeral_env(): + if is_ephemeral_env() or basic_token: token_cache[cache_key] = token.get() else: with patch("ansible.context.CLIARGS", {"ignore_certs": True}): @@ -213,19 +231,21 @@ def get_standalone_token(user, server, *, ignore_cache=False, ssl_verify=True): def is_standalone(): - return os.getenv('HUB_LOCAL', False) + return os.getenv("HUB_LOCAL", False) def is_ephemeral_env(): - return 'ephemeral' in os.getenv('HUB_API_ROOT', 'http://localhost:5001/api/automation-hub/') + return "ephemeral" in os.getenv( + "HUB_API_ROOT", "http://localhost:5001/api/automation-hub/" + ) def is_stage_environment(): - return os.getenv('TESTS_AGAINST_STAGE', False) + return os.getenv("TESTS_AGAINST_STAGE", False) def is_sync_testing(): - return os.getenv('SYNC_TESTS_STAGE', False) + return os.getenv("SYNC_TESTS_STAGE", False) def 
get_all_collections(api_client, repo): @@ -235,7 +255,7 @@ def get_all_collections(api_client, repo): order of the collections is not guaranteed and the expected collection might not be returned within the 100 collections. """ - url = f'content/{repo}/v3/collections/?limit=100&offset=0' + url = f"content/{repo}/v3/collections/?limit=100&offset=0" return api_client(url) @@ -253,7 +273,9 @@ def retrieve_collection(artifact, collections): """ local_collection_found = None for local_collection in collections["data"]: - if local_collection["name"] == artifact.name and \ - local_collection["namespace"] == artifact.namespace: + if ( + local_collection["name"] == artifact.name + and local_collection["namespace"] == artifact.namespace + ): local_collection_found = local_collection return local_collection_found diff --git a/galaxy_ng/tests/integration/utils/rbac_utils.py b/galaxy_ng/tests/integration/utils/rbac_utils.py index c03d4c5618..741dabef52 100644 --- a/galaxy_ng/tests/integration/utils/rbac_utils.py +++ b/galaxy_ng/tests/integration/utils/rbac_utils.py @@ -68,12 +68,13 @@ def create_local_image_container(config, client): return ee_name -def upload_test_artifact(client, namespace): +def upload_test_artifact(client, namespace, repo=None, artifact=None): test_version = generate_random_artifact_version() - artifact = build_collection( - "skeleton", - config={"namespace": namespace, "version": test_version}, - ) + if not artifact: + artifact = build_collection( + "skeleton", + config={"namespace": namespace, "version": test_version, "repository_name": repo}, + ) logger.debug(f"Uploading artifact {artifact}") resp = upload_artifact(None, client, artifact) logger.debug("Waiting for upload to be completed") diff --git a/galaxy_ng/tests/integration/utils/repo_management_utils.py b/galaxy_ng/tests/integration/utils/repo_management_utils.py new file mode 100644 index 0000000000..2b08b379f2 --- /dev/null +++ b/galaxy_ng/tests/integration/utils/repo_management_utils.py @@ -0,0 +1,121 @@ +import logging + +from orionutils.generator import build_collection + +from galaxy_ng.tests.integration.utils.rbac_utils import upload_test_artifact +from galaxy_ng.tests.integration.utils.tools import generate_random_string +from galaxykit.namespaces import create_namespace +from galaxykit.repositories import ( + create_repository, + create_distribution, + search_collection, +) +from galaxykit.utils import wait_for_task + +logger = logging.getLogger(__name__) + + +def repo_exists(name, repo_list): + for repo in repo_list: + if repo["name"] == name: + return True + return False + + +def create_repo_and_dist( + client, repo_name, hide_from_search=False, private=False, pipeline=None, remote=None +): + logger.debug(f"creating repo {repo_name}") + repo_res = create_repository( + client, + repo_name, + hide_from_search=hide_from_search, + private=private, + pipeline=pipeline, + remote=remote, + ) + create_distribution(client, repo_name, repo_res["pulp_href"]) + return repo_res["pulp_href"] + + +def edit_results_for_verification(results): + _results = results["data"] + new_results = [] + for data in _results: + repo_name = data["repository"]["name"] + cv_name = data["collection_version"]["name"] + cv_version = data["collection_version"]["version"] + is_highest = data["is_highest"] + is_deprecated = data["is_deprecated"] + is_signed = data["is_signed"] + new_result = { + "repo_name": repo_name, + "cv_name": cv_name, + "cv_version": cv_version, + "is_highest": is_highest, + "is_deprecated": is_deprecated, + "is_signed": 
is_signed,
+        }
+        new_results.append(new_result)
+    return new_results
+
+
+def search_collection_endpoint(client, **params):
+    result = search_collection(client, **params)
+    new_results = edit_results_for_verification(result)
+    return result["meta"]["count"], new_results
+
+
+def create_test_namespace(gc):
+    namespace_name = f"ns_test_{generate_random_string()}"
+    create_namespace(gc, namespace_name, "ns_group_for_tests")
+    return namespace_name
+
+
+def upload_new_artifact(
+    gc, namespace, repository, version, key=None, tags=None, dependencies=None
+):
+    artifact = build_collection(
+        "skeleton",
+        config={
+            "namespace": namespace,
+            "version": version,
+            "repository_name": repository,
+            "tags": tags,
+            "dependencies": dependencies,
+        },
+        key=key,
+    )
+    upload_test_artifact(gc, namespace, repository, artifact)
+    return artifact
+
+
+def add_content_units(gc, content_units, repo_pulp_href):
+    payload = {"add_content_units": content_units}
+    resp_task = gc.post(f"{repo_pulp_href}modify/", body=payload)
+    wait_for_task(gc, resp_task)
+
+
+def remove_content_units(gc, content_units, repo_pulp_href):
+    payload = {"remove_content_units": content_units}
+    resp_task = gc.post(f"{repo_pulp_href}modify/", body=payload)
+    wait_for_task(gc, resp_task)
+
+
+def verify_repo_data(expected_repos, actual_repos):
+    def is_dict_included(dict1, dict2):
+        # Check if all key-value pairs in dict1 are present in dict2
+        for key, value in dict1.items():
+            if key not in dict2 or dict2[key] != value:
+                return False
+        return True
+
+    for expected_repo in expected_repos:
+        found = False
+        for actual_repo in actual_repos:
+            if is_dict_included(expected_repo, actual_repo):
+                found = True
+        if not found:
+            logger.debug(f"{expected_repo} not found in actual repos")
+            return False
+    return True
diff --git a/galaxy_ng/tests/integration/utils/tools.py b/galaxy_ng/tests/integration/utils/tools.py
index 6bbe733a71..5c84348ff8 100644
--- a/galaxy_ng/tests/integration/utils/tools.py
+++ b/galaxy_ng/tests/integration/utils/tools.py
@@ -16,6 +16,10 @@ def uuid4():
     return str(uuid.uuid4())
 
 
+def generate_random_string(length=8):
+    return str(uuid.uuid4().hex)[:length]
+
+
 def iterate_all(api_client, url):
     """Iterate through all of the items on every page in a paginated list view."""
     next = url
diff --git a/integration_requirements.txt b/integration_requirements.txt
index 57919c5805..88d94f573e 100644
--- a/integration_requirements.txt
+++ b/integration_requirements.txt
@@ -7,4 +7,4 @@ openapi-spec-validator
 jsonschema
 hvac
 importlib_resources
-galaxykit==0.13
+galaxykit @ git+https://github.com/ansible/galaxykit