diff --git a/renku/ui/service/cache/models/project.py b/renku/ui/service/cache/models/project.py index b26b5d7815..5724c197cb 100644 --- a/renku/ui/service/cache/models/project.py +++ b/renku/ui/service/cache/models/project.py @@ -56,7 +56,6 @@ class Project(Model): owner = TextField() initialized = BooleanField() commit_sha = TextField() - branch = TextField() @property def abs_path(self) -> Path: diff --git a/renku/ui/service/controllers/datasets_edit.py b/renku/ui/service/controllers/datasets_edit.py index 230e4c83d6..355c1efe36 100644 --- a/renku/ui/service/controllers/datasets_edit.py +++ b/renku/ui/service/controllers/datasets_edit.py @@ -140,6 +140,7 @@ def to_response(self): "edited": edited, "warnings": warnings, "remote_branch": remote_branch, + "git_url": self.ctx["git_url"], } return result_response(self.RESPONSE_SERIALIZER, response) diff --git a/renku/ui/service/controllers/datasets_unlink.py b/renku/ui/service/controllers/datasets_unlink.py index b2bfcd2db1..23ffb6deb3 100644 --- a/renku/ui/service/controllers/datasets_unlink.py +++ b/renku/ui/service/controllers/datasets_unlink.py @@ -77,6 +77,7 @@ def to_response(self): response = { "unlinked": [record.entity.path for record in op_result], "remote_branch": remote_branch, + "git_url": self.ctx["git_url"], } return result_response(self.RESPONSE_SERIALIZER, response) diff --git a/renku/ui/service/serializers/cache.py b/renku/ui/service/serializers/cache.py index a568cb4b4c..81cdb64fd9 100644 --- a/renku/ui/service/serializers/cache.py +++ b/renku/ui/service/serializers/cache.py @@ -31,6 +31,7 @@ AsyncSchema, ErrorResponse, FileDetailsSchema, + GitUrlResponseMixin, RemoteRepositorySchema, RenkuSyncSchema, ) @@ -240,7 +241,7 @@ class ProjectMigrateRequest(AsyncSchema, RemoteRepositorySchema): skip_migrations = fields.Boolean(dump_default=False) -class ProjectMigrateResponse(RenkuSyncSchema): +class ProjectMigrateResponse(RenkuSyncSchema, GitUrlResponseMixin): """Response schema for project migrate.""" was_migrated = fields.Boolean() @@ -375,7 +376,7 @@ def get_obj_type(self, obj): return "error" -class ProjectMigrationCheckResponse(Schema): +class ProjectMigrationCheckResponse(GitUrlResponseMixin): """Response schema for project migration check.""" project_supported = fields.Boolean( diff --git a/renku/ui/service/serializers/common.py b/renku/ui/service/serializers/common.py index 088f8b1630..05ad54e549 100644 --- a/renku/ui/service/serializers/common.py +++ b/renku/ui/service/serializers/common.py @@ -168,3 +168,9 @@ class ErrorResponse(Schema): userReference = fields.String() devReference = fields.String() sentry = fields.String() + + +class GitUrlResponseMixin(Schema): + """Response containing a git url.""" + + git_url = fields.String(required=True, metadata={"description": "Remote git repository url."}) diff --git a/renku/ui/service/serializers/config.py b/renku/ui/service/serializers/config.py index 71ae8135bc..10a1d8711a 100644 --- a/renku/ui/service/serializers/config.py +++ b/renku/ui/service/serializers/config.py @@ -20,6 +20,7 @@ from renku.ui.service.serializers.common import ( AsyncSchema, GitCommitSHA, + GitUrlResponseMixin, MigrateSchema, RemoteRepositorySchema, RenkuSyncSchema, @@ -37,7 +38,7 @@ class ConfigShowSchema(Schema): config = fields.Dict(metadata={"description": "Dictionary of configuration items."}, required=True) -class ConfigShowResponse(ConfigShowSchema): +class ConfigShowResponse(ConfigShowSchema, GitUrlResponseMixin): """Response schema for project config show.""" default = 
fields.Dict(metadata={"description": "Dictionary of default configuration items."}, required=True) @@ -53,7 +54,7 @@ class ConfigSetRequest(AsyncSchema, ConfigShowSchema, MigrateSchema, RemoteRepos """Request schema for config set.""" -class ConfigSetResponse(ConfigShowSchema, RenkuSyncSchema): +class ConfigSetResponse(ConfigShowSchema, RenkuSyncSchema, GitUrlResponseMixin): """Response schema for project config set.""" default = fields.Dict(metadata={"description": "Dictionary of default configuration items."}) diff --git a/renku/ui/service/serializers/datasets.py b/renku/ui/service/serializers/datasets.py index 86bed1ee55..512c45cadd 100644 --- a/renku/ui/service/serializers/datasets.py +++ b/renku/ui/service/serializers/datasets.py @@ -23,6 +23,7 @@ from renku.ui.service.serializers.common import ( AsyncSchema, GitCommitSHA, + GitUrlResponseMixin, JobDetailsResponse, MigrateSchema, RemoteRepositorySchema, @@ -55,7 +56,7 @@ class DatasetCreateRequest(AsyncSchema, DatasetDetailsRequest, RemoteRepositoryS ) -class DatasetCreateResponse(DatasetSlugSchema, RenkuSyncSchema): +class DatasetCreateResponse(DatasetSlugSchema, RenkuSyncSchema, GitUrlResponseMixin): """Response schema for a dataset create view.""" @@ -69,7 +70,7 @@ class DatasetRemoveRequest(AsyncSchema, DatasetSlugSchema, RemoteRepositorySchem """Request schema for a dataset remove.""" -class DatasetRemoveResponse(DatasetSlugSchema, RenkuSyncSchema): +class DatasetRemoveResponse(DatasetSlugSchema, RenkuSyncSchema, GitUrlResponseMixin): """Response schema for a dataset create view.""" @@ -108,7 +109,7 @@ def check_files(self, data, **kwargs): return data -class DatasetAddResponse(DatasetSlugSchema, RenkuSyncSchema): +class DatasetAddResponse(DatasetSlugSchema, RenkuSyncSchema, GitUrlResponseMixin): """Response schema for a dataset add file view.""" project_id = fields.String(required=True) @@ -131,7 +132,7 @@ class DatasetDetailsResponse(DatasetDetails): images = fields.List(fields.Nested(ImageObject)) -class DatasetListResponse(Schema): +class DatasetListResponse(GitUrlResponseMixin): """Response schema for dataset list view.""" datasets = fields.List(fields.Nested(DatasetDetailsResponse), required=True) @@ -156,7 +157,7 @@ class DatasetFileDetails(Schema): added = fields.DateTime() -class DatasetFilesListResponse(DatasetSlugSchema): +class DatasetFilesListResponse(DatasetSlugSchema, GitUrlResponseMixin): """Response schema for dataset files list view.""" files = fields.List(fields.Nested(DatasetFileDetails), required=True) @@ -212,7 +213,7 @@ class DatasetEditRequest( ) -class DatasetEditResponse(RenkuSyncSchema): +class DatasetEditResponse(RenkuSyncSchema, GitUrlResponseMixin): """Dataset edit metadata response.""" edited = fields.Dict(required=True) @@ -243,7 +244,7 @@ def check_filters(self, data, **kwargs): return data -class DatasetUnlinkResponse(RenkuSyncSchema): +class DatasetUnlinkResponse(RenkuSyncSchema, GitUrlResponseMixin): """Dataset unlink files response.""" unlinked = fields.List(fields.String()) diff --git a/renku/ui/service/serializers/templates.py b/renku/ui/service/serializers/templates.py index 0652b253da..28fc70ae0d 100644 --- a/renku/ui/service/serializers/templates.py +++ b/renku/ui/service/serializers/templates.py @@ -25,6 +25,7 @@ from renku.domain_model.dataset import ImageObjectRequestJson from renku.ui.service.config import TEMPLATE_CLONE_DEPTH_DEFAULT from renku.ui.service.errors import UserRepoUrlInvalidError +from renku.ui.service.serializers.common import GitUrlResponseMixin from 
renku.ui.service.serializers.rpc import JsonRPCResponse from renku.ui.service.utils import normalize_git_url @@ -154,7 +155,7 @@ class ManifestTemplatesResponseRPC(JsonRPCResponse): result = fields.Nested(ManifestTemplatesResponse) -class ProjectTemplateResponse(Schema): +class ProjectTemplateResponse(GitUrlResponseMixin): """Response schema for dataset list view.""" url = fields.String(required=True) diff --git a/renku/ui/service/serializers/workflows.py b/renku/ui/service/serializers/workflows.py index 9fb1cb575f..e6fe36fb5a 100644 --- a/renku/ui/service/serializers/workflows.py +++ b/renku/ui/service/serializers/workflows.py @@ -22,7 +22,7 @@ from renku.domain_model.dataset import DatasetCreatorsJson from renku.infrastructure.persistent import Persistent from renku.ui.cli.utils.plugins import get_supported_formats -from renku.ui.service.serializers.common import GitCommitSHA, RemoteRepositorySchema +from renku.ui.service.serializers.common import GitCommitSHA, GitUrlResponseMixin, RemoteRepositorySchema from renku.ui.service.serializers.rpc import JsonRPCResponse @@ -73,7 +73,7 @@ class WorflowPlanEntryResponse(AbstractPlanResponse): children = fields.List(fields.String) -class WorkflowPlansListResponse(Schema): +class WorkflowPlansListResponse(GitUrlResponseMixin): """Response schema for plan list view.""" plans = fields.List(fields.Nested(WorflowPlanEntryResponse), required=True) @@ -136,7 +136,7 @@ class ParameterSchema(ParameterBaseSchema): pass -class PlanDetailsResponse(AbstractPlanResponse): +class PlanDetailsResponse(AbstractPlanResponse, GitUrlResponseMixin): """Schema for Plan details.""" last_executed = fields.DateTime() @@ -190,7 +190,7 @@ class LinkSchema(Schema): sink_entries = fields.List(fields.Nested(ParameterTargetSchema), data_key="sinks") -class CompositePlanDetailsResponse(AbstractPlanResponse): +class CompositePlanDetailsResponse(AbstractPlanResponse, GitUrlResponseMixin): """Schema for Plan details.""" steps = fields.List(fields.Nested(PlanReferenceSchema), data_key="plans") diff --git a/renku/ui/service/views/api_versions.py b/renku/ui/service/views/api_versions.py index 0df1c7bb51..d4074bbbd9 100644 --- a/renku/ui/service/views/api_versions.py +++ b/renku/ui/service/views/api_versions.py @@ -63,11 +63,9 @@ def add_url_rule( V1_5 = ApiVersion("1.5") V2_0 = ApiVersion("2.0") V2_1 = ApiVersion("2.1") -V2_2 = ApiVersion("2.2") -V2_3 = ApiVersion("2.3", is_base_version=True) +V2_2 = ApiVersion("2.2", is_base_version=True) -VERSIONS_FROM_V2_3 = [V2_3] -VERSIONS_FROM_V2_2 = [V2_2] + VERSIONS_FROM_V2_3 +VERSIONS_FROM_V2_2 = [V2_2] VERSIONS_FROM_V2_1 = [V2_1] + VERSIONS_FROM_V2_2 VERSIONS_FROM_V2_0 = [V2_0] + VERSIONS_FROM_V2_1 VERSIONS_FROM_V1_5 = [V1_5] + VERSIONS_FROM_V2_0 @@ -78,7 +76,6 @@ def add_url_rule( VERSIONS_BEFORE_1_1 = [V1_0] VERSIONS_BEFORE_2_0 = [V1_1, V1_2, V1_3, V1_4, V1_5] + VERSIONS_BEFORE_1_1 VERSIONS_BEFORE_2_2 = [V2_0, V2_1] + VERSIONS_BEFORE_2_0 -VERSIONS_BEFORE_2_3 = [V2_3] + VERSIONS_BEFORE_2_2 MINIMUM_VERSION = V1_0 -MAXIMUM_VERSION = V2_3 +MAXIMUM_VERSION = V2_2 diff --git a/renku/ui/service/views/templates.py b/renku/ui/service/views/templates.py index 308825634d..f6e746147c 100644 --- a/renku/ui/service/views/templates.py +++ b/renku/ui/service/views/templates.py @@ -19,7 +19,7 @@ from renku.ui.service.config import SERVICE_PREFIX from renku.ui.service.controllers.templates_create_project import TemplatesCreateProjectCtrl from renku.ui.service.controllers.templates_read_manifest import TemplatesReadManifestCtrl -from 
renku.ui.service.views.api_versions import VERSIONS_FROM_V2_3, VersionedBlueprint +from renku.ui.service.views.api_versions import VERSIONS_FROM_V2_2, VersionedBlueprint from renku.ui.service.views.decorators import accepts_json, requires_cache, requires_identity from renku.ui.service.views.error_handlers import ( handle_common_except, @@ -33,7 +33,7 @@ @templates_blueprint.route( - "/templates.read_manifest", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_3 + "/templates.read_manifest", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 ) @handle_common_except @handle_templates_read_errors @@ -73,7 +73,7 @@ def read_manifest_from_template(user_data, cache): @templates_blueprint.route( - "/templates.create_project", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_3 + "/templates.create_project", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 ) @handle_common_except @handle_templates_create_errors diff --git a/tests/service/jobs/test_config.py b/tests/service/jobs/test_config.py index 92ddc5f357..0bc77be8c5 100644 --- a/tests/service/jobs/test_config.py +++ b/tests/service/jobs/test_config.py @@ -42,7 +42,7 @@ def test_delay_config_set(svc_client_cache, it_remote_repo_url, view_user_data): updated_job = delayed_ctrl_job(context, view_user_data, job.job_id, renku_module, renku_ctrl) assert updated_job - assert {"config", "remote_branch"} == set(updated_job.ctrl_result["result"].keys()) + assert {"config", "remote_branch", "git_url"} == set(updated_job.ctrl_result["result"].keys()) @pytest.mark.service diff --git a/tests/service/jobs/test_datasets.py b/tests/service/jobs/test_datasets.py index 9b2a5a0cb9..00b865016f 100644 --- a/tests/service/jobs/test_datasets.py +++ b/tests/service/jobs/test_datasets.py @@ -264,7 +264,7 @@ def test_dataset_add_remote_file(url, svc_client_with_repo): response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"files", "slug", "project_id", "remote_branch"} == set(response.json["result"].keys()) + assert {"files", "slug", "project_id", "remote_branch", "git_url"} == set(response.json["result"].keys()) dest = make_project_path( user, @@ -324,7 +324,7 @@ def test_delay_add_file_job(svc_client_cache, it_remote_repo_url_temp_branch, vi updated_job = delayed_ctrl_job(context, view_user_data, job.job_id, renku_module, renku_ctrl) assert updated_job - assert {"remote_branch", "project_id", "files", "slug"} == updated_job.ctrl_result["result"].keys() + assert {"remote_branch", "project_id", "files", "slug", "git_url"} == updated_job.ctrl_result["result"].keys() @pytest.mark.service @@ -402,7 +402,7 @@ def test_delay_create_dataset_job(svc_client_cache, it_remote_repo_url_temp_bran updated_job = delayed_ctrl_job(context, view_user_data, job.job_id, renku_module, renku_ctrl) assert updated_job - assert {"slug", "remote_branch"} == updated_job.ctrl_result["result"].keys() + assert {"slug", "remote_branch", "git_url"} == updated_job.ctrl_result["result"].keys() @pytest.mark.service @@ -469,7 +469,7 @@ def test_delay_remove_dataset_job(svc_client_cache, it_remote_repo_url_temp_bran updated_job = delayed_ctrl_job(context, view_user_data, delete_job.job_id, renku_module, renku_ctrl) assert updated_job - assert {"slug", "remote_branch"} == updated_job.ctrl_result["result"].keys() + assert {"slug", "remote_branch", "git_url"} == updated_job.ctrl_result["result"].keys() @pytest.mark.service @@ 
-538,7 +538,7 @@ def test_delay_edit_dataset_job(svc_client_cache, it_remote_repo_url_temp_branch updated_job = delayed_ctrl_job(context, view_user_data, job.job_id, renku_module, renku_ctrl) assert updated_job - assert {"warnings", "remote_branch", "edited"} == updated_job.ctrl_result["result"].keys() + assert {"warnings", "remote_branch", "edited", "git_url"} == updated_job.ctrl_result["result"].keys() assert {"name"} == updated_job.ctrl_result["result"]["edited"].keys() @@ -610,7 +610,7 @@ def test_delay_unlink_dataset_job(svc_client_cache, it_remote_repo_url_temp_bran updated_job = delayed_ctrl_job(context, view_user_data, job.job_id, renku_module, renku_ctrl) assert updated_job - assert {"unlinked", "remote_branch"} == updated_job.ctrl_result["result"].keys() + assert {"unlinked", "remote_branch", "git_url"} == updated_job.ctrl_result["result"].keys() assert ["data/data1"] == updated_job.ctrl_result["result"]["unlinked"] @@ -674,7 +674,7 @@ def test_unlink_dataset_sync(svc_client_cache, it_remote_repo_url_temp_branch, v updated_job = delayed_ctrl_job(context, view_user_data, job.job_id, renku_module, renku_ctrl) assert updated_job - assert {"unlinked", "remote_branch"} == updated_job.ctrl_result["result"].keys() + assert {"unlinked", "remote_branch", "git_url"} == updated_job.ctrl_result["result"].keys() assert ["data/data1"] == updated_job.ctrl_result["result"]["unlinked"] diff --git a/tests/service/views/test_cache_views.py b/tests/service/views/test_cache_views.py index eead315c0a..ec947522e9 100644 --- a/tests/service/views/test_cache_views.py +++ b/tests/service/views/test_cache_views.py @@ -944,7 +944,7 @@ def test_cache_gets_synchronized(local_remote_repository, directory_tree, quick_ assert response assert 200 == response.status_code - assert {"datasets"} == set(response.json["result"].keys()), response.json + assert {"datasets", "git_url"} == set(response.json["result"].keys()), response.json assert 1 == len(response.json["result"]["datasets"]) payload = { diff --git a/tests/service/views/test_dataset_views.py b/tests/service/views/test_dataset_views.py index 647ddcd4ad..1d1e302169 100644 --- a/tests/service/views/test_dataset_views.py +++ b/tests/service/views/test_dataset_views.py @@ -80,7 +80,7 @@ def test_create_dataset_view(svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert payload["slug"] == response.json["result"]["slug"] @@ -96,7 +96,7 @@ def test_create_dataset_view_with_datadir(svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert payload["slug"] == response.json["result"]["slug"] params = { @@ -123,7 +123,7 @@ def test_remote_create_dataset_view(svc_client_cache, it_remote_repo_url): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) @pytest.mark.service @@ -176,7 +176,7 @@ def test_remove_dataset_view(svc_client_with_repo): response = 
svc_client.post("/datasets.remove", data=json.dumps(payload), headers=headers) - assert {"slug", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert payload["slug"] == response.json["result"]["slug"] # NOTE: Ensure that dataset does not exist in this project anymore! @@ -221,7 +221,7 @@ def test_create_dataset_with_metadata(svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert payload["slug"] == response.json["result"]["slug"] params = { @@ -276,7 +276,7 @@ def test_create_dataset_with_images(svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert payload["slug"] == response.json["result"]["slug"] params = { @@ -324,7 +324,7 @@ def test_create_dataset_with_custom_metadata(svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert payload["slug"] == response.json["result"]["slug"] params = { @@ -377,7 +377,7 @@ def test_create_dataset_with_image_download(svc_client_with_repo, img_url): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert payload["slug"] == response.json["result"]["slug"] params = { @@ -415,7 +415,7 @@ def test_create_dataset_with_uploaded_images(svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert payload["slug"] == response.json["result"]["slug"] params = { @@ -558,7 +558,7 @@ def test_add_file_view(svc_client_with_repo): response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "project_id", "files", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "project_id", "files", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert 1 == len(response.json["result"]["files"]) assert file_id == response.json["result"]["files"][0]["file_id"] @@ -615,7 +615,7 @@ def test_list_datasets_view(svc_client_with_repo): response = svc_client.get("/datasets.list", query_string=params, headers=headers) assert_rpc_response(response) - assert {"datasets"} == set(response.json["result"].keys()) + assert {"datasets", "git_url"} == set(response.json["result"].keys()) assert 0 != len(response.json["result"]["datasets"]) assert { "version", @@ -670,7 +670,7 @@ def test_list_datasets_view_remote(svc_client_with_repo, it_remote_repo_url): response = 
svc_client.get("/datasets.list", query_string=params, headers=headers) assert_rpc_response(response) - assert {"datasets"} == set(response.json["result"].keys()) + assert {"datasets", "git_url"} == set(response.json["result"].keys()) assert 0 != len(response.json["result"]["datasets"]) assert { "version", @@ -738,7 +738,7 @@ def test_list_datasets_files_remote(svc_client_with_repo, it_remote_repo_url): response = svc_client.get("/datasets.files_list", query_string=params, headers=headers) assert_rpc_response(response) - assert {"files", "slug"} == set(response.json["result"].keys()) + assert {"files", "slug", "git_url"} == set(response.json["result"].keys()) assert 0 != len(response.json["result"]["files"]) assert "ds1" == response.json["result"]["slug"] @@ -773,7 +773,7 @@ def test_create_and_list_datasets_view(svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert payload["slug"] == response.json["result"]["slug"] params_list = { @@ -782,7 +782,7 @@ def test_create_and_list_datasets_view(svc_client_with_repo): response = svc_client.get("/datasets.list", query_string=params_list, headers=headers) assert_rpc_response(response) - assert {"datasets"} == set(response.json["result"].keys()) + assert {"datasets", "git_url"} == set(response.json["result"].keys()) assert 0 != len(response.json["result"]["datasets"]) assert { "creators", @@ -820,7 +820,7 @@ def test_list_dataset_files(svc_client_with_repo): response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "files", "project_id", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "files", "project_id", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert file_id == response.json["result"]["files"][0]["file_id"] params = { @@ -830,7 +830,7 @@ def test_list_dataset_files(svc_client_with_repo): response = svc_client.get("/datasets.files_list", query_string=params, headers=headers) assert_rpc_response(response) - assert {"slug", "files"} == set(response.json["result"].keys()) + assert {"slug", "files", "git_url"} == set(response.json["result"].keys()) assert params["slug"] == response.json["result"]["slug"] assert file_name in [file["name"] for file in response.json["result"]["files"]] assert {"name", "path", "added"} == response.json["result"]["files"][0].keys() @@ -872,14 +872,14 @@ def test_add_with_unpacked_archive(datapack_zip, svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert payload["slug"] == response.json["result"]["slug"] payload = {"git_url": url_components.href, "slug": payload["slug"], "files": [{"file_id": file_["file_id"]}]} response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "files", "project_id", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "files", "project_id", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert file_["file_id"] == 
response.json["result"]["files"][0]["file_id"] params = { @@ -889,7 +889,7 @@ def test_add_with_unpacked_archive(datapack_zip, svc_client_with_repo): response = svc_client.get("/datasets.files_list", query_string=params, headers=headers) assert_rpc_response(response) - assert {"slug", "files"} == set(response.json["result"].keys()) + assert {"slug", "files", "git_url"} == set(response.json["result"].keys()) assert params["slug"] == response.json["result"]["slug"] assert file_["file_name"] in [file["name"] for file in response.json["result"]["files"]] @@ -931,7 +931,7 @@ def test_add_with_unpacked_archive_all(datapack_zip, svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert payload["slug"] == response.json["result"]["slug"] payload = { @@ -942,7 +942,7 @@ def test_add_with_unpacked_archive_all(datapack_zip, svc_client_with_repo): response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "files", "project_id", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "files", "project_id", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert files == response.json["result"]["files"] params = { @@ -951,7 +951,7 @@ def test_add_with_unpacked_archive_all(datapack_zip, svc_client_with_repo): } response = svc_client.get("/datasets.files_list", query_string=params, headers=headers) assert_rpc_response(response) - assert {"slug", "files"} == set(response.json["result"].keys()) + assert {"slug", "files", "git_url"} == set(response.json["result"].keys()) assert params["slug"] == response.json["result"]["slug"] assert file_["file_name"] in [file["name"] for file in response.json["result"]["files"]] @@ -969,7 +969,7 @@ def test_add_existing_file(svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert payload["slug"] == response.json["result"]["slug"] files = [{"file_path": "README.md"}] @@ -980,7 +980,7 @@ def test_add_existing_file(svc_client_with_repo): } response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "files", "project_id", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "files", "project_id", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert files == response.json["result"]["files"] @@ -1083,7 +1083,7 @@ def test_dataset_add_remote(url, svc_client_cache, project_metadata): response = client.post("/datasets.add", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"files", "slug", "project_id", "remote_branch"} == set(response.json["result"]) + assert {"files", "slug", "project_id", "remote_branch", "git_url"} == set(response.json["result"]) job_id = response.json["result"]["files"][0]["job_id"] user_job = cache.get_job(user, job_id) @@ -1119,7 +1119,7 @@ def test_dataset_add_multiple_remote(svc_client_cache, project_metadata): response = client.post("/datasets.add", data=json.dumps(payload), headers=headers) 
assert_rpc_response(response) - assert {"files", "slug", "project_id", "remote_branch"} == set(response.json["result"]) + assert {"files", "slug", "project_id", "remote_branch", "git_url"} == set(response.json["result"]) for file in response.json["result"]["files"]: job_id = file["job_id"] @@ -1148,7 +1148,7 @@ def test_add_remote_and_local_file(svc_client_with_repo): response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "files", "project_id", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "files", "project_id", "remote_branch", "git_url"} == set(response.json["result"].keys()) for pair in zip(response.json["result"]["files"], payload["files"]): if "job_id" in pair[0]: assert pair[0].pop("job_id") @@ -1199,7 +1199,7 @@ def test_edit_datasets_view(svc_client_with_repo, custom_metadata, custom_metada response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert payload["slug"] == response.json["result"]["slug"] params_list = { @@ -1221,7 +1221,7 @@ def test_edit_datasets_view(svc_client_with_repo, custom_metadata, custom_metada edit_payload["custom_metadata_source"] = custom_metadata_source response = svc_client.post("/datasets.edit", data=json.dumps(edit_payload), headers=headers) assert_rpc_response(response) - assert {"warnings", "edited", "remote_branch"} == set(response.json["result"]) + assert {"warnings", "edited", "remote_branch", "git_url"} == set(response.json["result"]) assert { "name": "my new name", "keywords": ["keyword1"], @@ -1250,7 +1250,7 @@ def test_edit_datasets_view_without_modification(svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert payload["slug"] == response.json["result"]["slug"] params_list = { @@ -1267,7 +1267,7 @@ def test_edit_datasets_view_without_modification(svc_client_with_repo): response = svc_client.post("/datasets.edit", data=json.dumps(edit_payload), headers=headers) assert_rpc_response(response) - assert {"warnings", "edited", "remote_branch"} == set(response.json["result"]) + assert {"warnings", "edited", "remote_branch", "git_url"} == set(response.json["result"]) assert {} == response.json["result"]["edited"] params_list = { @@ -1308,7 +1308,7 @@ def test_edit_datasets_view_unset_values(svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert payload["slug"] == response.json["result"]["slug"] params_list = { @@ -1328,7 +1328,7 @@ def test_edit_datasets_view_unset_values(svc_client_with_repo): response = svc_client.post("/datasets.edit", data=json.dumps(edit_payload), headers=headers) assert_rpc_response(response) - assert {"warnings", "edited", "remote_branch"} == set(response.json["result"]) + assert {"warnings", "edited", "remote_branch", "git_url"} == set(response.json["result"]) assert {
"keywords": [], "custom_metadata": None, @@ -1375,7 +1375,7 @@ def test_edit_dataset_with_images(svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"slug", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert payload["slug"] == response.json["result"]["slug"] params = { @@ -1400,7 +1400,7 @@ def test_edit_dataset_with_images(svc_client_with_repo): response = svc_client.post("/datasets.edit", data=json.dumps(edit_payload), headers=headers) assert_rpc_response(response) - assert {"warnings", "edited", "remote_branch"} == set(response.json["result"]) + assert {"warnings", "edited", "remote_branch", "git_url"} == set(response.json["result"]) assert {"images"} == response.json["result"]["edited"].keys() images = response.json["result"]["edited"]["images"] @@ -1438,7 +1438,7 @@ def test_edit_dataset_with_images(svc_client_with_repo): response = svc_client.post("/datasets.edit", data=json.dumps(edit_payload), headers=headers) assert_rpc_response(response) - assert {"warnings", "edited", "remote_branch"} == set(response.json["result"]) + assert {"warnings", "edited", "remote_branch", "git_url"} == set(response.json["result"]) assert {"images"} == response.json["result"]["edited"].keys() assert 0 == len(response.json["result"]["edited"]["images"]) @@ -1451,7 +1451,7 @@ def test_edit_dataset_with_images(svc_client_with_repo): response = svc_client.post("/datasets.edit", data=json.dumps(edit_payload), headers=headers) assert_rpc_response(response) - assert {"warnings", "edited", "remote_branch"} == set(response.json["result"]) + assert {"warnings", "edited", "remote_branch", "git_url"} == set(response.json["result"]) assert 0 == len(response.json["result"]["edited"].keys()) @@ -1499,7 +1499,7 @@ def test_unlink_file(unlink_file_setup): response = svc_client.post("/datasets.unlink", data=json.dumps(unlink_payload), headers=headers) assert_rpc_response(response) - assert {"unlinked", "remote_branch"} == set(response.json["result"].keys()) + assert {"unlinked", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert any(p.endswith("README.md") for p in response.json["result"]["unlinked"]) diff --git a/tests/service/views/test_templates_views.py b/tests/service/views/test_templates_views.py index facce869b8..36f9c86833 100644 --- a/tests/service/views/test_templates_views.py +++ b/tests/service/views/test_templates_views.py @@ -212,7 +212,7 @@ def test_create_project_from_template_failures(svc_client_templates_creation): assert 200 == response.status_code assert {"error"} == set(response.json.keys()) assert UserProjectCreationError.code == response.json["error"]["code"], response.json - assert "git_url" in response.json["error"]["devMessage"] + assert "`project_repository`, `project_namespace`" in response.json["error"]["devMessage"] # NOTE: missing fields -- unlikely to happen. 
If that is the case, we should determine if it's a user error or not payload_missing_field = deepcopy(payload) diff --git a/tests/service/views/test_workflow_plan_views.py b/tests/service/views/test_workflow_plan_views.py index 298bdf6002..1233a82b38 100644 --- a/tests/service/views/test_workflow_plan_views.py +++ b/tests/service/views/test_workflow_plan_views.py @@ -37,7 +37,7 @@ def test_list_workflow_plans_view(svc_client_with_repo): response = svc_client.get("/workflow_plans.list", query_string=params, headers=headers) assert_rpc_response(response) - assert {"plans"} == set(response.json["result"].keys()) + assert {"plans", "git_url"} == set(response.json["result"].keys()) assert 0 != len(response.json["result"]["plans"]) assert { "children",