From 2174a3d10bc2a697990682ee59848316ca8e42b2 Mon Sep 17 00:00:00 2001 From: xdaile Date: Thu, 4 Jan 2024 13:17:53 +0100 Subject: [PATCH 1/6] Create pydantic models --- iib/common/pydantic_models.py | 356 ++++++++++++++++++++++++++++++++++ iib/common/pydantic_utils.py | 126 ++++++++++++ requirements.txt | 155 ++++++++++++++- setup.py | 2 + 4 files changed, 630 insertions(+), 9 deletions(-) create mode 100644 iib/common/pydantic_models.py create mode 100644 iib/common/pydantic_utils.py diff --git a/iib/common/pydantic_models.py b/iib/common/pydantic_models.py new file mode 100644 index 00000000..7a819bc4 --- /dev/null +++ b/iib/common/pydantic_models.py @@ -0,0 +1,356 @@ +from typing import Any, Dict, List, Optional, Union +from typing_extensions import Annotated + +from pydantic import ( + AfterValidator, + BaseModel, + BeforeValidator, + model_validator, + SecretStr, +) + +from iib.exceptions import ValidationError +from iib.common.pydantic_utils import ( + DISTRIBUTION_SCOPE_LITERAL, + GRAPH_MODE_LITERAL, + binary_image_check, + distribution_scope_lower, + get_unique_bundles, + get_unique_deprecation_list_items, + image_format_check, + images_format_check, + length_validator, + from_index_add_arches, + validate_graph_mode_index_image, + validate_overwrite_params, +) + +UnionPydanticRequestType = Union[ + 'AddPydanticModel', + 'CreateEmptyIndexPydanticModel', + 'FbcOperationsPydanticModel', + 'MergeIndexImagePydanticModel', + 'RecursiveRelatedBundlesPydanticModel', + 'RegenerateBundlePydanticModel', + 'RmPydanticModel', +] + + +class PydanticModel(BaseModel): + + @classmethod + def _get_all_keys_to_check_in_db(cls): + raise NotImplementedError("Not implemented") + + def get_keys_to_check_in_db(self): + """Filter keys, which need to be checked in db. 
Return only a keys that are set to values.""" + return [ + k for k in self._get_all_keys_to_check_in_db() if getattr(self, k, None) + ] + + +class AddPydanticModel(PydanticModel): + """Datastructure of the request to /builds/add API point.""" + + add_arches: Optional[List[str]] = None + binary_image: Annotated[ + Optional[str], + AfterValidator(length_validator), + AfterValidator(binary_image_check), + ] = None + build_tags: Optional[List[str]] = [] + bundles: Annotated[ + List[str], + AfterValidator(length_validator), + AfterValidator(get_unique_bundles), + AfterValidator(images_format_check), + ] + cnr_token: Optional[SecretStr] = None # deprecated + check_related_images: Optional[bool] = False + deprecation_list: Annotated[ + Optional[List[str]], + AfterValidator(get_unique_deprecation_list_items), + AfterValidator(images_format_check), + ] = [] # deprecated + distribution_scope: Annotated[ + Optional[DISTRIBUTION_SCOPE_LITERAL], BeforeValidator(distribution_scope_lower), + ] = None + force_backport: Optional[bool] = False # deprecated + from_index: Annotated[str, AfterValidator(image_format_check)] + graph_update_mode: Optional[GRAPH_MODE_LITERAL] = None + organization: Optional[str] = None # deprecated + overwrite_from_index: Optional[bool] = False + overwrite_from_index_token: Optional[SecretStr] = None + + _from_index_add_arches_check = model_validator(mode='after')(from_index_add_arches) + + # TODO remove this comment -> Validator from RequestIndexImageMixin class + @model_validator(mode='after') + def verify_overwrite_from_index_token(self) -> 'AddPydanticModel': + """Check the 'overwrite_from_index' parameter in combination with 'overwrite_from_index_token' parameter.""" + validate_overwrite_params(self.overwrite_from_index, self.overwrite_from_index_token) + return self + + # TODO remove this comment -> Validator from RequestAdd class + @model_validator(mode='after') + def verify_graph_update_mode_with_index_image(self) -> 'AddPydanticModel': + """Validate graph mode and check if index image is allowed to use different graph mode.""" + validate_graph_mode_index_image(self.graph_update_mode, self.from_index) + return self + + # TODO remove this comment -> Validator from RequestAdd class + @model_validator(mode='after') + def from_index_needed_if_no_bundles(self) -> 'AddPydanticModel': + """ + Check if no bundles and `from_index is specified + + if no bundles and no from index then an empty index will be created which is a no-op + """ + if not (self.bundles or self.from_index): + raise ValidationError('"from_index" must be specified if no bundles are specified') + return self + + # TODO remove this comment -> Validator from RequestADD class + @model_validator(mode='after') + def bundles_needed_with_check_related_images(self) -> 'AddPydanticModel': + """Verify that `check_related_images` is specified when bundles are specified""" + if self.check_related_images and not self.bundles: + raise ValidationError( + '"check_related_images" must be specified only when bundles are specified' + ) + return self + + def get_json_for_request(self): + """Return json with the parameters we store in the db.""" + return self.model_dump( + exclude=[ + "add_arches", + "build_tags", + "cnr_token", + "force_backport", + "overwrite_from_index", + "overwrite_from_index_token", + ], + exclude_defaults=True, + ) + + + def _get_all_keys_to_check_in_db(self): + return ["binary_image", "bundles", "deprecation_list", "from_index"] + + +class RmPydanticModel(PydanticModel): + """Datastructure of the request to 
/builds/rm API point.""" + + add_arches: Optional[List[str]] = None + binary_image: Annotated[ + Optional[str], + AfterValidator(binary_image_check), + ] = None + build_tags: Optional[List[str]] = [] + distribution_scope: Annotated[ + Optional[DISTRIBUTION_SCOPE_LITERAL], BeforeValidator(distribution_scope_lower), + ] = None + from_index: Annotated[str, AfterValidator(image_format_check)] + operators: Annotated[List[str], AfterValidator(length_validator)] + overwrite_from_index: Optional[bool] = False + overwrite_from_index_token: Optional[SecretStr] = None + + _from_index_add_arches_check = model_validator(mode='after')(from_index_add_arches) + + @model_validator(mode='after') + def verify_overwrite_from_index_token(self) -> 'RmPydanticModel': + validate_overwrite_params(self.overwrite_from_index, self.overwrite_from_index_token) + return self + + def get_json_for_request(self): + """Return json with the parameters we store in the db.""" + return self.model_dump( + exclude=["add_arches", "build_tags", "overwrite_from_index", "overwrite_from_index_token"], + exclude_defaults=True, + ) + + def _get_all_keys_to_check_in_db(self): + return ["binary_image", "from_index", "operators"] + + +class AddRmBatchPydanticModel(BaseModel): + annotations: Dict[str, Any] + build_requests: List[Union[AddPydanticModel, RmPydanticModel]] + + +class RegistryAuth(BaseModel): + auth: SecretStr + + +class RegistryAuths(BaseModel): # is {"auths":{}} allowed? + auths: Annotated[Dict[SecretStr, RegistryAuth], AfterValidator(length_validator)] + + +class RegenerateBundlePydanticModel(PydanticModel): + """Datastructure of the request to /builds/regenerate-bundle API point.""" + + # BUNDLE_IMAGE, from_bundle_image_resolved, build_tags? + bundle_replacements: Optional[Dict[str, str]] = {} + from_bundle_image: Annotated[str, AfterValidator(image_format_check)] + organization: Optional[str] = None + registry_auths: Optional[RegistryAuths] = None # not in db + + def get_json_for_request(self): + """Return json with the parameters we store in the db.""" + return self.model_dump( + exclude=["registry_auths"], + exclude_defaults=True, + ) + + def _get_all_keys_to_check_in_db(self): + return ["from_bundle_image"] + + +class RegenerateBundleBatchPydanticModel(BaseModel): + build_requests: List[RegenerateBundlePydanticModel] + annotations: Dict[str, Any] + + +class MergeIndexImagePydanticModel(PydanticModel): + """Datastructure of the request to /builds/regenerate-bundle API point.""" + + binary_image: Annotated[ + Optional[str], + AfterValidator(image_format_check), + AfterValidator(binary_image_check), + ] = None + build_tags: Optional[List[str]] = [] + deprecation_list: Annotated[ + Optional[List[str]], + AfterValidator(get_unique_deprecation_list_items), + AfterValidator(images_format_check), + ] = [] + distribution_scope: Annotated[ + Optional[DISTRIBUTION_SCOPE_LITERAL], BeforeValidator(distribution_scope_lower), + ] = None + graph_update_mode: Optional[GRAPH_MODE_LITERAL] = None + overwrite_target_index: Optional[bool] = False # Why do we need this bool? Isn't the token enough? 
+    overwrite_target_index_token: Optional[SecretStr] = None
+    source_from_index: Annotated[str, AfterValidator(image_format_check)]
+    target_index: Annotated[Optional[str], AfterValidator(image_format_check)] = None
+    batch: Optional[str] = None  # TODO: not sure whether this field should be kept
+    user: Optional[str] = None  # TODO: not sure whether this field should be kept
+
+    @model_validator(mode='after')
+    def verify_graph_update_mode_with_target_index(self) -> 'MergeIndexImagePydanticModel':
+        validate_graph_mode_index_image(self.graph_update_mode, self.target_index)
+        return self
+
+    @model_validator(mode='after')
+    def verify_overwrite_from_index_token(self) -> 'MergeIndexImagePydanticModel':
+        validate_overwrite_params(
+            self.overwrite_target_index,
+            self.overwrite_target_index_token,
+            disable_auth_check=True,
+        )
+        return self
+
+    def get_json_for_request(self):
+        """Return json with the parameters we store in the db."""
+        return self.model_dump(
+            exclude=["build_tags", "overwrite_target_index", "overwrite_target_index_token"],
+            exclude_defaults=True,
+        )
+
+    def _get_all_keys_to_check_in_db(self):
+        return ["binary_image", "deprecation_list", "source_from_index", "target_index"]
+
+
+class CreateEmptyIndexPydanticModel(PydanticModel):
+    """Datastructure of the request to /builds/create-empty-index API point."""
+
+    binary_image: Annotated[
+        Optional[str],
+        AfterValidator(image_format_check),
+        AfterValidator(binary_image_check),
+    ] = None
+    from_index: Annotated[
+        str,
+        AfterValidator(image_format_check),
+        AfterValidator(length_validator),
+    ]
+    labels: Optional[Dict[str, str]] = {}
+    output_fbc: Optional[bool] = False
+
+    def get_json_for_request(self):
+        """Return json with the parameters we store in the db."""
+        return self.model_dump(
+            exclude_defaults=True,
+        )
+
+    def _get_all_keys_to_check_in_db(self):
+        return ["binary_image", "from_index"]
+
+
+class RecursiveRelatedBundlesPydanticModel(PydanticModel):
+    organization: Optional[str] = None
+    parent_bundle_image: Annotated[
+        str,
+        AfterValidator(image_format_check),
+        AfterValidator(length_validator),
+    ]
+    registry_auths: Optional[RegistryAuths] = None  # not in db
+
+    def get_json_for_request(self):
+        """Return json with the parameters we store in the db."""
+        return self.model_dump(
+            exclude=["registry_auths"],
+            exclude_defaults=True,
+        )
+
+    def _get_all_keys_to_check_in_db(self):
+        return ["parent_bundle_image"]
+
+
+class FbcOperationsPydanticModel(PydanticModel):
+    add_arches: Optional[List[str]] = []
+    binary_image: Annotated[
+        Optional[str],
+        AfterValidator(image_format_check),
+        AfterValidator(binary_image_check),
+    ] = None
+    bundles: Annotated[
+        Optional[List[str]],
+        AfterValidator(length_validator),
+        AfterValidator(get_unique_bundles),
+        AfterValidator(images_format_check),
+    ] = []
+    build_tags: Optional[List[str]] = []
+    distribution_scope: Annotated[
+        Optional[DISTRIBUTION_SCOPE_LITERAL], BeforeValidator(distribution_scope_lower),
+    ] = None
+    fbc_fragment: Annotated[
+        str,
+        AfterValidator(image_format_check),
+        AfterValidator(length_validator),
+    ]
+    from_index: Annotated[
+        str,
+        AfterValidator(image_format_check),
+        AfterValidator(length_validator),
+    ]
+    organization: Optional[str] = None
+    overwrite_from_index: Optional[bool] = False
+    overwrite_from_index_token: Optional[SecretStr] = None
+
+    @model_validator(mode='after')
+    def verify_overwrite_from_index_token(self) -> 'FbcOperationsPydanticModel':
+        validate_overwrite_params(self.overwrite_from_index, self.overwrite_from_index_token)
+        return self
+
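Editor's aside, not part of the patch: several of these models (add, rm, merge-index-image, fbc-operations) funnel their overwrite flag and token through validate_overwrite_params in iib/common/pydantic_utils.py below, which also answers the question left on the overwrite_target_index field above: the boolean and the token are validated as a pair, so supplying one without the other is rejected. A minimal sketch of that behaviour, assuming the helper can be called on its own and keeping in mind that the auth branch in the current draft is short-circuited by the leftover "1 or":

    from werkzeug.exceptions import Forbidden

    from iib.common.pydantic_utils import validate_overwrite_params
    from iib.exceptions import ValidationError

    # Token without the flag: rejected with a ValidationError.
    try:
        validate_overwrite_params(overwrite_index_image=False, overwrite_index_image_token='secret')
    except ValidationError as error:
        print(error)

    # Flag without the token: rejected with a Forbidden error.
    try:
        validate_overwrite_params(overwrite_index_image=True, overwrite_index_image_token=None)
    except Forbidden as error:
        print(error)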
+    def get_json_for_request(self):
+        """Return json with the parameters we store in the db."""
+        return self.model_dump(
+            exclude=["add_arches", "build_tags", "overwrite_from_index", "overwrite_from_index_token"],
+            exclude_defaults=True,
+        )
+
+    def _get_all_keys_to_check_in_db(self):
+        return ["binary_image", "bundles", "fbc_fragment", "from_index"]
diff --git a/iib/common/pydantic_utils.py b/iib/common/pydantic_utils.py
new file mode 100644
index 00000000..968bbef5
--- /dev/null
+++ b/iib/common/pydantic_utils.py
@@ -0,0 +1,126 @@
+from typing import List, Optional, Any, Literal
+
+import copy
+from werkzeug.exceptions import Forbidden
+from flask import current_app
+from flask_login import current_user
+
+from iib.exceptions import ValidationError
+
+
+GRAPH_MODE_LITERAL = Literal['replaces', 'semver', 'semver-skippatch']
+DISTRIBUTION_SCOPE_LITERAL = Literal['prod', 'stage', 'dev']
+
+
+# TODO: add a regex in the future to reject values such as ":s", "s:", or ":".
+def image_format_check(image_name: str) -> str:
+    if '@' not in image_name and ':' not in image_name:
+        raise ValidationError(
+            f'Image {image_name} should have a tag or a digest specified.'
+        )
+    return image_name
+
+
+def images_format_check(image_list: List[str]) -> List[str]:
+    for image_name in image_list:
+        image_format_check(image_name)
+    return image_list
+
+
+def get_unique_bundles(bundles: List[str]) -> List[str]:
+    if not bundles:
+        return bundles
+
+    # dict.fromkeys() removes duplicates while preserving the original order of the bundles.
+    unique_bundles = list(dict.fromkeys(bundles))
+    if len(unique_bundles) != len(bundles):
+        duplicate_bundles = copy.copy(bundles)
+        for bundle in unique_bundles:
+            duplicate_bundles.remove(bundle)
+
+        # flask.current_app.logger.info(
+        #     f'Removed duplicate bundles from request: {duplicate_bundles}'
+        # )
+    return unique_bundles
+
+
+# RequestIndexImageMixin
+def get_unique_deprecation_list_items(deprecation_list: Optional[List[str]]) -> Optional[List[str]]:
+    if not deprecation_list:
+        return deprecation_list
+    return list(dict.fromkeys(deprecation_list))
+
+
+def validate_graph_mode_index_image(graph_update_mode: Optional[str], index_image: Optional[str]) -> Optional[str]:
+    """
+    Validate graph mode and check if index image is allowed to use different graph mode.
+ + :param str graph_update_mode: one of the graph mode options + :param str index_image: pullspec of index image to which graph mode should be applied to + :raises: ValidationError when incorrect graph_update_mode is set + :raises: Forbidden when graph_mode can't be used for given index image + """ + + if graph_update_mode: + allowed_from_indexes: List[str] = ["REMOVE_#:r"] # current_app.config['IIB_GRAPH_MODE_INDEX_ALLOW_LIST'] + if index_image not in allowed_from_indexes: + raise Forbidden( + '"graph_update_mode" can only be used on the' + f' following index image: {allowed_from_indexes}' + ) + return graph_update_mode + + +# RequestIndexImageMixin +def from_index_add_arches(model: 'AddRequestPydanticModel') -> 'AddRequestPydanticModel': + """ + Check if both `from_index` and `add_arches` are not specified + """ + if not model.from_index and not model.add_arches: + raise ValidationError('One of "from_index" or "add_arches" must be specified') + return model + + +# RequestIndexImageMixin +def binary_image_check(binary_image: str) -> str: + """ + # Validate binary_image is correctly provided + """ + if not binary_image and not current_app.config['IIB_BINARY_IMAGE_CONFIG']: + raise ValidationError('The "binary_image" value must be a non-empty string') + return binary_image + + +# RequestIndexImageMixin +def validate_overwrite_params( + overwrite_index_image: Optional[bool], + overwrite_index_image_token: Optional[str], + disable_auth_check: Optional[bool] = False, +) -> None: + """ + Check if both `overwrite_index_image` and `overwrite_index_image_token` are specified + + + """ + if overwrite_index_image_token and not overwrite_index_image: + raise ValidationError( + 'The "overwrite_from_index" parameter is required when' + ' the "overwrite_from_index_token" parameter is used' + ) + + # Verify the user is authorized to use overwrite_from_index + # current_user.is_authenticated is only ever False when auth is disabled + # TODO Remove "1 or" + if 1 or disable_auth_check or current_user.is_authenticated: + if overwrite_index_image and not overwrite_index_image_token: + raise Forbidden( + 'You must set "overwrite_from_index_token" to use "overwrite_from_index"' + ) + + +# RequestIndexImageMixin +def distribution_scope_lower(distribution_scope: str) -> str: + return distribution_scope.lower() + + +def length_validator(model_property: Any) -> Any: + if len(model_property) == 0: + raise ValidationError(f"The {type(model_property)} {model_property} should have at least 1 item.") + return model_property diff --git a/requirements.txt b/requirements.txt index da59b0cc..82480768 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,6 +12,10 @@ amqp==5.0.6 \ --hash=sha256:03e16e94f2b34c31f8bf1206d8ddd3ccaa4c315f7f6a1879b7b1210d229568c2 \ --hash=sha256:493a2ac6788ce270a2f6a765b017299f60c1998f5a8617908ee9be082f7300fb # via kombu +annotated-types==0.6.0 \ + --hash=sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43 \ + --hash=sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d + # via pydantic backoff==1.10.0 \ --hash=sha256:5e73e2cbe780e1915a204799dba0a01896f45f4385e636bcca7a0614d879d0cd \ --hash=sha256:b8fba021fac74055ac05eb7c7bfce4723aedde6cd0a504e5326bcb0bdd6d19a4 @@ -164,7 +168,7 @@ decorator==4.4.2 \ # via # dogpile-cache # gssapi -Deprecated==1.2.14 \ +deprecated==1.2.14 \ --hash=sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c \ --hash=sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3 # via opentelemetry-api 
@@ -183,11 +187,11 @@ Flask-Login==0.6.3 \ --hash=sha256:5e23d14a607ef12806c699590b89d0f0e0d67baeec599d75947bf9c147330333 \ --hash=sha256:849b25b82a436bf830a054e74214074af59097171562ab10bfa999e6b78aae5d # via iib (setup.py) -Flask-Migrate==4.0.5 \ +flask-migrate==4.0.5 \ --hash=sha256:613a2df703998e78716cace68cd83972960834424457f5b67f56e74fff950aef \ --hash=sha256:d3f437a8b5f3849d1bb1b60e1b818efc564c66e3fefe90b62e5db08db295e1b1 # via iib (setup.py) -Flask-SQLAlchemy==3.1.1 \ +flask-sqlalchemy==3.1.1 \ --hash=sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0 \ --hash=sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312 # via @@ -202,6 +206,7 @@ googleapis-common-protos==1.52.0 \ greenlet==2.0.2 \ --hash=sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a \ --hash=sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a \ + --hash=sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1 \ --hash=sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43 \ --hash=sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33 \ --hash=sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8 \ @@ -227,6 +232,7 @@ greenlet==2.0.2 \ --hash=sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91 \ --hash=sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5 \ --hash=sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9 \ + --hash=sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417 \ --hash=sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8 \ --hash=sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b \ --hash=sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6 \ @@ -250,8 +256,10 @@ greenlet==2.0.2 \ --hash=sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7 \ --hash=sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75 \ --hash=sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae \ + --hash=sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47 \ --hash=sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b \ --hash=sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470 \ + --hash=sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c \ --hash=sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564 \ --hash=sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9 \ --hash=sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099 \ @@ -381,7 +389,7 @@ Mako==1.3.0 \ --hash=sha256:57d4e997349f1a92035aa25c17ace371a4213f2ca42f99bee9a602500cfd54d9 \ --hash=sha256:e3a9d388fd00e87043edbe8792f45880ac0114e9c4adc69f6e9bfb2c55e3b11b # via alembic -MarkupSafe==2.1.3 \ +markupsafe==2.1.3 \ --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ @@ -638,6 +646,7 @@ psycopg2-binary==2.9.9 \ --hash=sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041 \ --hash=sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7 \ --hash=sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860 \ + 
--hash=sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d \ --hash=sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245 \ --hash=sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27 \ --hash=sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417 \ @@ -663,6 +672,117 @@ psycopg2-binary==2.9.9 \ pycparser==2.19 \ --hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3 # via cffi +pydantic==2.5.3 \ + --hash=sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a \ + --hash=sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4 + # via iib (setup.py) +pydantic-core==2.14.6 \ + --hash=sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556 \ + --hash=sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e \ + --hash=sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411 \ + --hash=sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245 \ + --hash=sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c \ + --hash=sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66 \ + --hash=sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd \ + --hash=sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d \ + --hash=sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b \ + --hash=sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06 \ + --hash=sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948 \ + --hash=sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341 \ + --hash=sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0 \ + --hash=sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f \ + --hash=sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a \ + --hash=sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2 \ + --hash=sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51 \ + --hash=sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80 \ + --hash=sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8 \ + --hash=sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d \ + --hash=sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8 \ + --hash=sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb \ + --hash=sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590 \ + --hash=sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87 \ + --hash=sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534 \ + --hash=sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b \ + --hash=sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145 \ + --hash=sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba \ + --hash=sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b \ + --hash=sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2 \ + --hash=sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e \ + --hash=sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052 \ + --hash=sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622 \ + --hash=sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab \ + 
--hash=sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b \ + --hash=sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66 \ + --hash=sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e \ + --hash=sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4 \ + --hash=sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e \ + --hash=sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec \ + --hash=sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c \ + --hash=sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed \ + --hash=sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937 \ + --hash=sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f \ + --hash=sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9 \ + --hash=sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4 \ + --hash=sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96 \ + --hash=sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277 \ + --hash=sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23 \ + --hash=sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7 \ + --hash=sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b \ + --hash=sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91 \ + --hash=sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d \ + --hash=sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e \ + --hash=sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1 \ + --hash=sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2 \ + --hash=sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160 \ + --hash=sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9 \ + --hash=sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670 \ + --hash=sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7 \ + --hash=sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c \ + --hash=sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb \ + --hash=sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42 \ + --hash=sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d \ + --hash=sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8 \ + --hash=sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1 \ + --hash=sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6 \ + --hash=sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8 \ + --hash=sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf \ + --hash=sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e \ + --hash=sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a \ + --hash=sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9 \ + --hash=sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1 \ + --hash=sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40 \ + --hash=sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2 \ + --hash=sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d \ + 
--hash=sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f \ + --hash=sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f \ + --hash=sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af \ + --hash=sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7 \ + --hash=sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda \ + --hash=sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a \ + --hash=sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95 \ + --hash=sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0 \ + --hash=sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60 \ + --hash=sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149 \ + --hash=sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975 \ + --hash=sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4 \ + --hash=sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe \ + --hash=sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94 \ + --hash=sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03 \ + --hash=sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c \ + --hash=sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b \ + --hash=sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a \ + --hash=sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24 \ + --hash=sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391 \ + --hash=sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c \ + --hash=sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab \ + --hash=sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd \ + --hash=sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786 \ + --hash=sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08 \ + --hash=sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8 \ + --hash=sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6 \ + --hash=sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0 \ + --hash=sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421 + # via pydantic pyparsing==3.0.6 \ --hash=sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4 \ --hash=sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81 @@ -714,13 +834,17 @@ ruamel-yaml==0.16.10 \ # iib (setup.py) # operator-manifest ruamel-yaml-clib==0.2.6 \ + --hash=sha256:066f886bc90cc2ce44df8b5f7acfc6a7e2b2e672713f027136464492b0c34d7c \ --hash=sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd \ --hash=sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee \ --hash=sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0 \ + --hash=sha256:1b4139a6ffbca8ef60fdaf9b33dec05143ba746a6f0ae0f9d11d38239211d335 \ + --hash=sha256:210c8fcfeff90514b7133010bf14e3bad652c8efde6b20e00c43854bf94fa5a6 \ --hash=sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7 \ --hash=sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277 \ --hash=sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104 \ --hash=sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd \ + 
--hash=sha256:61bc5e5ca632d95925907c569daa559ea194a4d16084ba86084be98ab1cec1c6 \ --hash=sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0 \ --hash=sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78 \ --hash=sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de \ @@ -736,6 +860,7 @@ ruamel-yaml-clib==0.2.6 \ --hash=sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5 \ --hash=sha256:bf75d28fa071645c529b5474a550a44686821decebdd00e21127ef1fd566eabe \ --hash=sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751 \ + --hash=sha256:d3c620a54748a3d4cf0bcfe623e388407c8e85a4b06b8188e126302bcab93ea8 \ --hash=sha256:d67f273097c368265a7b81e152e07fb90ed395df6e552b9fa858c6d2c9f42502 \ --hash=sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed \ --hash=sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c @@ -751,7 +876,7 @@ six==1.14.0 \ # click-repl # python-dateutil # python-memcached -SQLAlchemy==2.0.22 \ +sqlalchemy==2.0.22 \ --hash=sha256:0b0b3f2686c3f162123adba3cb8b626ed7e9b8433ab528e36ed270b4f70d1cdb \ --hash=sha256:0c1fea8c0abcb070ffe15311853abfda4e55bf7dc1d4889497b3403629f3bf00 \ --hash=sha256:0e1ce8ebd2e040357dde01a3fb7d30d9b5736b3e54a94002641dfd0aa12ae6ce \ @@ -812,12 +937,14 @@ tenacity==8.1.0 \ --hash=sha256:35525cd47f82830069f0d6b73f7eb83bc5b73ee2fff0437952cedf98b27653ac \ --hash=sha256:e48c437fdf9340f5666b92cd7990e96bc5fc955e1298baf4a907e3972067a445 # via iib (setup.py) -typing-extensions==4.5.0 \ - --hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \ - --hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4 +typing-extensions==4.9.0 \ + --hash=sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783 \ + --hash=sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd # via # iib (setup.py) # opentelemetry-sdk + # pydantic + # pydantic-core # sqlalchemy urllib3==1.26.18 \ --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ @@ -836,7 +963,7 @@ wcwidth==0.2.12 \ --hash=sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02 \ --hash=sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c # via prompt-toolkit -Werkzeug==2.3.8 \ +werkzeug==2.3.8 \ --hash=sha256:554b257c74bbeb7a0d254160a4f8ffe185243f52a52035060b761ca62d977f03 \ --hash=sha256:bba1f19f8ec89d4d607a3bd62f1904bd2e609472d93cd85e9d4e178f472c3748 # via @@ -849,23 +976,30 @@ wrapt==1.14.1 \ --hash=sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2 \ --hash=sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656 \ --hash=sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3 \ + --hash=sha256:2020f391008ef874c6d9e208b24f28e31bcb85ccff4f335f15a3251d222b92d9 \ --hash=sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff \ + --hash=sha256:240b1686f38ae665d1b15475966fe0472f78e71b1b4903c143a842659c8e4cb9 \ --hash=sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310 \ + --hash=sha256:26046cd03936ae745a502abf44dac702a5e6880b2b01c29aea8ddf3353b68224 \ --hash=sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a \ --hash=sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57 \ --hash=sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069 \ + --hash=sha256:2feecf86e1f7a86517cab34ae6c2f081fd2d0dac860cb0c0ded96d799d20b335 \ 
--hash=sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383 \ --hash=sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe \ + --hash=sha256:358fe87cc899c6bb0ddc185bf3dbfa4ba646f05b1b0b9b5a27c2cb92c2cea204 \ --hash=sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87 \ --hash=sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d \ --hash=sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b \ --hash=sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907 \ + --hash=sha256:49ef582b7a1152ae2766557f0550a9fcbf7bbd76f43fbdc94dd3bf07cc7168be \ --hash=sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f \ --hash=sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0 \ --hash=sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28 \ --hash=sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1 \ --hash=sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853 \ --hash=sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc \ + --hash=sha256:6447e9f3ba72f8e2b985a1da758767698efa72723d5b59accefd716e9e8272bf \ --hash=sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3 \ --hash=sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3 \ --hash=sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164 \ @@ -888,8 +1022,10 @@ wrapt==1.14.1 \ --hash=sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4 \ --hash=sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d \ --hash=sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d \ + --hash=sha256:a9008dad07d71f68487c91e96579c8567c98ca4c3881b9b113bc7b33e9fd78b8 \ --hash=sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8 \ --hash=sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5 \ + --hash=sha256:acae32e13a4153809db37405f5eba5bac5fbe2e2ba61ab227926a22901051c0a \ --hash=sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471 \ --hash=sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00 \ --hash=sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68 \ @@ -904,6 +1040,7 @@ wrapt==1.14.1 \ --hash=sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb \ --hash=sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b \ --hash=sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f \ + --hash=sha256:ecee4132c6cd2ce5308e21672015ddfed1ff975ad0ac8d27168ea82e71413f55 \ --hash=sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462 \ --hash=sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015 \ --hash=sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af diff --git a/setup.py b/setup.py index acfa63c4..1d98168e 100644 --- a/setup.py +++ b/setup.py @@ -28,6 +28,8 @@ 'tenacity', 'typing-extensions', 'packaging', + 'pydantic', + 'annotated-types', 'opentelemetry-api', 'opentelemetry-sdk', 'opentelemetry-exporter-otlp', From c7f8c8dfc82a15728f57e3232b6ed54ea45411aa Mon Sep 17 00:00:00 2001 From: xdaile Date: Fri, 12 Jan 2024 15:47:28 +0100 Subject: [PATCH 2/6] Use pydantic models --- iib/web/api_v1.py | 471 ++++------- iib/web/iib_static_types.py | 201 ----- iib/web/models.py | 781 ++---------------- iib/workers/tasks/build.py | 108 +-- 
iib/workers/tasks/build_create_empty_index.py | 26 +- iib/workers/tasks/build_fbc_operations.py | 39 +- iib/workers/tasks/build_merge_index_image.py | 52 +- .../tasks/build_recursive_related_bundles.py | 15 +- iib/workers/tasks/build_regenerate_bundle.py | 14 +- 9 files changed, 342 insertions(+), 1365 deletions(-) diff --git a/iib/web/api_v1.py b/iib/web/api_v1.py index c02b8362..42d8b860 100644 --- a/iib/web/api_v1.py +++ b/iib/web/api_v1.py @@ -1,5 +1,4 @@ # SPDX-License-Identifier: GPL-3.0-or-later -import copy import logging import os from datetime import datetime @@ -12,7 +11,7 @@ from sqlalchemy.sql import text from sqlalchemy import or_ from werkzeug.exceptions import Forbidden, Gone, NotFound -from typing import Any, cast, Dict, List, Optional, Tuple, Union +from typing import List, Optional, Tuple, Union from iib.exceptions import IIBError, ValidationError from iib.web import db, messaging @@ -24,6 +23,7 @@ Operator, Request, RequestAdd, + RequestCreateEmptyIndex, RequestFbcOperations, RequestMergeIndexImage, RequestRecursiveRelatedBundles, @@ -31,12 +31,22 @@ RequestRm, RequestState, RequestStateMapping, - get_request_query_options, RequestTypeMapping, - RequestCreateEmptyIndex, User, + get_request_query_options, ) from iib.web.s3_utils import get_object_from_s3_bucket +from iib.common.pydantic_models import ( + AddPydanticModel, + RmPydanticModel, + RegenerateBundlePydanticModel, + RegenerateBundleBatchPydanticModel, + AddRmBatchPydanticModel, + CreateEmptyIndexPydanticModel, + RecursiveRelatedBundlesPydanticModel, + FbcOperationsPydanticModel, + MergeIndexImagePydanticModel, +) from botocore.response import StreamingBody from iib.web.utils import pagination_metadata, str_to_bool from iib.workers.tasks.build import ( @@ -51,115 +61,10 @@ from iib.workers.tasks.build_merge_index_image import handle_merge_request from iib.workers.tasks.build_create_empty_index import handle_create_empty_index_request from iib.workers.tasks.general import failed_request_callback -from iib.web.iib_static_types import ( - AddRequestPayload, - AddRmBatchPayload, - CreateEmptyIndexPayload, - FbcOperationRequestPayload, - MergeIndexImagesPayload, - PayloadTypesUnion, - RecursiveRelatedBundlesRequestPayload, - RegenerateBundleBatchPayload, - RegenerateBundlePayload, - RmRequestPayload, -) api_v1 = flask.Blueprint('api_v1', __name__) -def _get_rm_args( - payload: RmRequestPayload, - request: Request, - overwrite_from_index: bool, -) -> List[Union[str, List[str], Dict[str, str], bool, None]]: - """ - Generate arguments for remove request. - - :param RmRequestPayload payload: Payload from the remove request - :param Request request: request saved in the database - :param bool overwrite_from_index: determines if the overwrite should be forced - :return: List with remove arguments - :rtype: list - """ - return [ - payload['operators'], - request.id, - payload['from_index'], - payload.get('binary_image'), - payload.get('add_arches'), - overwrite_from_index, - payload.get('overwrite_from_index_token'), - request.distribution_scope, - flask.current_app.config['IIB_BINARY_IMAGE_CONFIG'], - payload.get('build_tags', []), - ] - - -def _get_add_args( - payload: AddRequestPayload, - request: Request, - overwrite_from_index: bool, - celery_queue: Optional[str], -) -> List[Any]: - """ - Generate arguments for add request. 
- - :param AddRequestPayload payload: Payload from the add request - :param Request request: request saved in the database - :param bool overwrite_from_index: determines if the overwrite should be forced - :param str celery_queue: name of celery queue - :return: List with add arguments - :rtype: list - """ - return [ - payload.get('bundles', []), - request.id, - payload.get('binary_image'), - payload.get('from_index'), - payload.get('add_arches'), - payload.get('cnr_token'), - payload.get('organization'), - payload.get('force_backport'), - overwrite_from_index, - payload.get('overwrite_from_index_token'), - request.distribution_scope, - flask.current_app.config['IIB_GREENWAVE_CONFIG'].get(celery_queue), - flask.current_app.config['IIB_BINARY_IMAGE_CONFIG'], - payload.get('deprecation_list', []), - payload.get('build_tags', []), - payload.get('graph_update_mode'), - payload.get('check_related_images', False), - ] - - -def _get_safe_args( - args: List[Any], - payload: PayloadTypesUnion, -) -> List[Union[str, List[str], bool, Dict[str, str]]]: - """ - Generate arguments that are safe to print to stdout or log. - - :param list args: arguments for each api, that are not safe - :param PayloadTypesUnion payload: Payload from the IIB request - :return: List with safe to print arguments - :rtype: list - """ - safe_args = copy.copy(args) - - if payload.get('cnr_token'): - safe_args[safe_args.index(payload['cnr_token'])] = '*****' # type: ignore - if payload.get('overwrite_from_index_token'): - safe_args[safe_args.index(payload['overwrite_from_index_token'])] = '*****' # type: ignore - if payload.get('overwrite_target_index_token'): - safe_args[ - safe_args.index(payload['overwrite_target_index_token']) # type: ignore - ] = '*****' - if payload.get('registry_auths'): - safe_args[safe_args.index(payload['registry_auths'])] = '*****' # type: ignore - - return safe_args - - def get_artifact_file_from_s3_bucket( s3_key_prefix: str, s3_file_name: str, @@ -191,35 +96,6 @@ def get_artifact_file_from_s3_bucket( raise NotFound() -def _get_unique_bundles(bundles: List[str]) -> List[str]: - """ - Return list with unique bundles. - - :param list bundles: bundles given in payload from original request - :return: list of unique bundles preserving order (python 3.6+) - :rtype: list - """ - if not bundles: - return bundles - - # `dict` is preserving order of inserted keys since Python 3.6. - # Keys in dictionary are behaving as a set() therefore can not have same key twice. - # This will create dictionary where keys are taken from `bundles` using `dict.fromkeys()` - # After that we have dictionary with unique keys with same order as it is in `bundles`. 
- # Last step is to convert the keys from this dictionary to list using `list()` - unique_bundles = list(dict.fromkeys(bundles).keys()) - - if len(unique_bundles) != len(bundles): - duplicate_bundles = copy.copy(bundles) - for bundle in unique_bundles: - duplicate_bundles.remove(bundle) - - flask.current_app.logger.info( - f'Removed duplicate bundles from request: {duplicate_bundles}' - ) - return unique_bundles - - @api_v1.route('/builds/') def get_build(request_id: int) -> flask.Response: """ @@ -576,34 +452,39 @@ def add_bundles() -> Tuple[flask.Response, int]: :rtype: flask.Response :raise ValidationError: if required parameters are not supplied """ - payload: AddRequestPayload = cast(AddRequestPayload, flask.request.get_json()) - if not isinstance(payload, dict): - raise ValidationError('The input data must be a JSON object') - # Only run `_get_unique_bundles` if it is a list. If it's not, `from_json` - # will raise an error to the user. - if payload.get('bundles') and isinstance(payload['bundles'], list): - payload['bundles'] = _get_unique_bundles(payload['bundles']) + try: + request_payload = AddPydanticModel.model_validate( + flask.request.get_json(), strict=True, + ) + except ValidationError as e: + # If the JSON data doesn't match the Pydantic model, return a 400 Bad Request response + return flask.jsonify({'Error parsing data': str(e)}), 400 - request = RequestAdd.from_json(payload) + request = RequestAdd.from_json_replacement( + payload=request_payload, + build_tags_allowed=True, + ) db.session.add(request) db.session.commit() messaging.send_message_for_state_change(request, new_batch_msg=True) - overwrite_from_index = payload.get('overwrite_from_index', False) from_index_pull_spec = request.from_index.pull_specification if request.from_index else None celery_queue = _get_user_queue( - serial=overwrite_from_index, from_index_pull_spec=from_index_pull_spec + serial=request_payload.overwrite_from_index, from_index_pull_spec=from_index_pull_spec ) - args = _get_add_args(payload, request, overwrite_from_index, celery_queue) - safe_args = _get_safe_args(args, payload) + args = [ + request_payload, + request.id, + flask.current_app.config['IIB_GREENWAVE_CONFIG'].get(celery_queue), + flask.current_app.config['IIB_BINARY_IMAGE_CONFIG'], + ] error_callback = failed_request_callback.s(request.id) try: handle_add_request.apply_async( args=args, link_error=error_callback, - argsrepr=repr(safe_args), queue=celery_queue, headers={'traceparent': flask.request.headers.get('traceparent')}, ) @@ -807,29 +688,35 @@ def rm_operators() -> Tuple[flask.Response, int]: :rtype: flask.Response :raise ValidationError: if required parameters are not supplied """ - payload: RmRequestPayload = cast(RmRequestPayload, flask.request.get_json()) - if not isinstance(payload, dict): - raise ValidationError('The input data must be a JSON object') + try: + request_payload = RmPydanticModel.model_validate( + flask.request.get_json(), strict=True, + ) + except ValidationError as e: + # If the JSON data doesn't match the Pydantic model, return a 400 Bad Request response + return flask.jsonify({'Error parsing data': str(e)}), 400 - request = RequestRm.from_json(payload) + request = RequestRm.from_json_replacement( + payload=request_payload, + build_tags_allowed=True, + ) db.session.add(request) db.session.commit() messaging.send_message_for_state_change(request, new_batch_msg=True) - overwrite_from_index = payload.get('overwrite_from_index', False) - - args = _get_rm_args(payload, request, overwrite_from_index) - 
safe_args = _get_safe_args(args, payload) - + args = [ + request_payload, + request.id, + flask.current_app.config['IIB_BINARY_IMAGE_CONFIG'], + ] error_callback = failed_request_callback.s(request.id) from_index_pull_spec = request.from_index.pull_specification if request.from_index else None try: handle_rm_request.apply_async( args=args, link_error=error_callback, - argsrepr=repr(safe_args), queue=_get_user_queue( - serial=overwrite_from_index, + serial=request_payload.overwrite_from_index, from_index_pull_spec=from_index_pull_spec, ), ) @@ -849,30 +736,33 @@ def regenerate_bundle() -> Tuple[flask.Response, int]: :rtype: flask.Response :raise ValidationError: if required parameters are not supplied """ - payload: RegenerateBundlePayload = cast(RegenerateBundlePayload, flask.request.get_json()) - if not isinstance(payload, dict): - raise ValidationError('The input data must be a JSON object') + try: + request_payload = RegenerateBundlePydanticModel.model_validate( + flask.request.get_json(), strict=True, + ) + except ValidationError as e: + # If the JSON data doesn't match the Pydantic model, return a 400 Bad Request response + return flask.jsonify({'Error parsing data': str(e)}), 400 + + request = RequestRegenerateBundle.from_json_replacement( + payload=request_payload, + build_tags_allowed=True, + ) - request = RequestRegenerateBundle.from_json(payload) db.session.add(request) db.session.commit() messaging.send_message_for_state_change(request, new_batch_msg=True) args = [ - payload['from_bundle_image'], - payload.get('organization'), + request_payload, request.id, - payload.get('registry_auths'), - payload.get('bundle_replacements', dict()), ] - safe_args = _get_safe_args(args, payload) error_callback = failed_request_callback.s(request.id) try: handle_regenerate_bundle_request.apply_async( args=args, link_error=error_callback, - argsrepr=repr(safe_args), queue=_get_user_queue(), ) except kombu.exceptions.OperationalError: @@ -891,27 +781,27 @@ def regenerate_bundle_batch() -> Tuple[flask.Response, int]: :rtype: flask.Response :raise ValidationError: if required parameters are not supplied """ - payload: RegenerateBundleBatchPayload = cast( - RegenerateBundleBatchPayload, flask.request.get_json() - ) - Batch.validate_batch_request_params(payload) - batch = Batch(annotations=payload.get('annotations')) + try: + request_payload_batch = RegenerateBundleBatchPydanticModel.model_validate( + flask.request.get_json(), strict=True, + ) + except ValidationError as e: + # If the JSON data doesn't match the Pydantic model, return a 400 Bad Request response + return flask.jsonify({'Error parsing data': str(e)}), 400 + + batch = Batch(annotations=request_payload_batch.annotations) db.session.add(batch) requests = [] # Iterate through all the build requests and verify that the requests are valid before # committing them and scheduling the tasks - for build_request in payload['build_requests']: - try: - request = RequestRegenerateBundle.from_json(build_request, batch) - except ValidationError as e: - # Rollback the transaction if any of the build requests are invalid - db.session.rollback() - raise ValidationError( - f'{str(e).rstrip(".")}. This occurred on the build request in ' - f'index {payload["build_requests"].index(build_request)}.' 
- ) + for request_payload in request_payload_batch.build_requests: + request = RequestRegenerateBundle.from_json_replacement( + payload=request_payload, + batch=batch, + build_tags_allowed=True, + ) db.session.add(request) requests.append(request) @@ -922,22 +812,17 @@ def regenerate_bundle_batch() -> Tuple[flask.Response, int]: # This list will be used for the log message below and avoids the need of having to iterate # through the list of requests another time processed_request_ids = [] - build_and_requests = zip(payload['build_requests'], requests) + build_and_requests = zip(request_payload.build_requests, requests) try: - for build_request, request in build_and_requests: + for request_payload, request in build_and_requests: args = [ - build_request['from_bundle_image'], - build_request.get('organization'), + request_payload, request.id, - build_request.get('registry_auths'), - build_request.get('bundle_replacements', dict()), ] - safe_args = _get_safe_args(args, build_request) error_callback = failed_request_callback.s(request.id) handle_regenerate_bundle_request.apply_async( args=args, link_error=error_callback, - argsrepr=repr(safe_args), queue=_get_user_queue(), ) @@ -966,34 +851,34 @@ def add_rm_batch() -> Tuple[flask.Response, int]: :rtype: flask.Response :raise ValidationError: if required parameters are not supplied """ - payload: AddRmBatchPayload = cast(AddRmBatchPayload, flask.request.get_json()) - Batch.validate_batch_request_params(payload) + try: + request_payload_batch = AddRmBatchPydanticModel.model_validate( + flask.request.get_json(), strict=True, + ) + except ValidationError as e: + # If the JSON data doesn't match the Pydantic model, return a 400 Bad Request response + return flask.jsonify({'Error parsing data': str(e)}), 400 - batch = Batch(annotations=payload.get('annotations')) + batch = Batch(annotations=request_payload_batch.annotations) db.session.add(batch) - requests: List[Union[RequestAdd, RequestRm]] = [] + requests: List[Union[AddPydanticModel, RmPydanticModel]] = [] # Iterate through all the build requests and verify that the requests are valid before # committing them and scheduling the tasks - for build_request in payload['build_requests']: - try: - if build_request.get('operators'): - # Check for the validity of a RM request - # cast Union[AddRequestPayload, RmRequestPayload] based on presence of 'operators' - request = RequestRm.from_json(cast(RmRequestPayload, build_request), batch) - elif build_request.get('bundles'): - # cast Union[AddRequestPayload, RmRequestPayload] based on presence of 'bundles' - build_request_uniq = cast(AddRequestPayload, copy.deepcopy(build_request)) - build_request_uniq['bundles'] = _get_unique_bundles(build_request_uniq['bundles']) - # Check for the validity of an Add request - request = RequestAdd.from_json(build_request_uniq, batch) - else: - raise ValidationError('Build request is not a valid Add/Rm request.') - except ValidationError as e: - raise ValidationError( - f'{str(e).rstrip(".")}. This occurred on the build request in ' - f'index {payload["build_requests"].index(build_request)}.' 
+ for request_payload in request_payload_batch.build_requests: + if isinstance(request_payload, AddPydanticModel): + request = RequestAdd.from_json_replacement( + payload=request_payload, + batch=batch, + build_tags_allowed=True, + ) + else: + request = RequestRm.from_json_replacement( + payload=request_payload, + batch=batch, + build_tags_allowed=True, ) + db.session.add(request) requests.append(request) @@ -1004,46 +889,36 @@ def add_rm_batch() -> Tuple[flask.Response, int]: # This list will be used for the log message below and avoids the need of having to iterate # through the list of requests another time processed_request_ids = [] - for build_request, request in zip(payload['build_requests'], requests): + for request_payload, request in zip(request_payload_batch.build_requests, requests): request_jsons.append(request.to_json()) - overwrite_from_index = build_request.get('overwrite_from_index', False) from_index_pull_spec = request.from_index.pull_specification if request.from_index else None celery_queue = _get_user_queue( - serial=overwrite_from_index, from_index_pull_spec=from_index_pull_spec + serial=request_payload.overwrite_from_index, from_index_pull_spec=from_index_pull_spec ) - if isinstance(request, RequestAdd): - args: List[Any] = _get_add_args( - # cast Union[AddRequestPayload, RmRequestPayload] based on request variable - cast(AddRequestPayload, build_request), - request, - overwrite_from_index, - celery_queue, - ) - elif isinstance(request, RequestRm): - args = _get_rm_args( - # cast Union[AddRequestPayload, RmRequestPayload] based on request variable - cast(RmRequestPayload, build_request), - request, - overwrite_from_index, - ) - - safe_args = _get_safe_args(args, build_request) - error_callback = failed_request_callback.s(request.id) try: if isinstance(request, RequestAdd): + args = [ + request_payload, + request.id, + flask.current_app.config['IIB_GREENWAVE_CONFIG'].get(celery_queue), + flask.current_app.config['IIB_BINARY_IMAGE_CONFIG'], + ] handle_add_request.apply_async( args=args, link_error=error_callback, - argsrepr=repr(safe_args), queue=celery_queue, ) - else: + elif isinstance(request, RequestRm): + args = [ + request_payload, + request.id, + flask.current_app.config['IIB_BINARY_IMAGE_CONFIG'], + ] handle_rm_request.apply_async( args=args, link_error=error_callback, - argsrepr=repr(safe_args), queue=celery_queue, ) except kombu.exceptions.OperationalError: @@ -1069,36 +944,34 @@ def merge_index_image() -> Tuple[flask.Response, int]: :rtype: flask.Response :raise ValidationError: if required parameters are not supplied """ - payload: MergeIndexImagesPayload = cast(MergeIndexImagesPayload, flask.request.get_json()) - if not isinstance(payload, dict): - raise ValidationError('The input data must be a JSON object') - request = RequestMergeIndexImage.from_json(payload) + try: + request_payload = MergeIndexImagePydanticModel.model_validate( + flask.request.get_json(), strict=True, + ) + except ValidationError as e: + # If the JSON data doesn't match the Pydantic model, return a 400 Bad Request response + return flask.jsonify({'Error parsing data': str(e)}), 400 + + request = RequestMergeIndexImage.from_json_replacement( + payload=request_payload, + build_tags_allowed=True, + ) + db.session.add(request) db.session.commit() messaging.send_message_for_state_change(request, new_batch_msg=True) - overwrite_target_index = payload.get('overwrite_target_index', False) - celery_queue = _get_user_queue(serial=overwrite_target_index) + celery_queue = 
_get_user_queue(serial=request_payload.overwrite_target_index) args = [ - payload['source_from_index'], - payload.get('deprecation_list', []), + request_payload, request.id, - payload.get('binary_image'), - payload.get('target_index'), - overwrite_target_index, - payload.get('overwrite_target_index_token'), - request.distribution_scope, flask.current_app.config['IIB_BINARY_IMAGE_CONFIG'], - payload.get('build_tags', []), - payload.get('graph_update_mode'), - payload.get('ignore_bundle_ocp_version'), ] - safe_args = _get_safe_args(args, payload) error_callback = failed_request_callback.s(request.id) try: handle_merge_request.apply_async( - args=args, link_error=error_callback, argsrepr=repr(safe_args), queue=celery_queue + args=args, link_error=error_callback, queue=celery_queue ) except kombu.exceptions.OperationalError: handle_broker_error(request) @@ -1118,29 +991,33 @@ def create_empty_index() -> Tuple[flask.Response, int]: :rtype: flask.Response :raise ValidationError: if required parameters are not supplied """ - payload: CreateEmptyIndexPayload = cast(CreateEmptyIndexPayload, flask.request.get_json()) - if not isinstance(payload, dict): - raise ValidationError('The input data must be a JSON object') + try: + request_payload = CreateEmptyIndexPydanticModel.model_validate( + flask.request.get_json(), strict=True, + ) + except ValidationError as e: + # If the JSON data doesn't match the Pydantic model, return a 400 Bad Request response + return flask.jsonify({'Error parsing data': str(e)}), 400 + + request = RequestCreateEmptyIndex.from_json_replacement( + payload=request_payload, + build_tags_allowed=True, + ) - request = RequestCreateEmptyIndex.from_json(payload) db.session.add(request) db.session.commit() messaging.send_message_for_state_change(request, new_batch_msg=True) args = [ - payload['from_index'], + request_payload, request.id, - payload.get('output_fbc'), - payload.get('binary_image'), - payload.get('labels'), flask.current_app.config['IIB_BINARY_IMAGE_CONFIG'], ] - safe_args = _get_safe_args(args, payload) error_callback = failed_request_callback.s(request.id) try: handle_create_empty_index_request.apply_async( - args=args, link_error=error_callback, argsrepr=repr(safe_args), queue=_get_user_queue() + args=args, link_error=error_callback, queue=_get_user_queue() ) except kombu.exceptions.OperationalError: handle_broker_error(request) @@ -1160,31 +1037,33 @@ def recursive_related_bundles() -> Tuple[flask.Response, int]: :rtype: flask.Response :raise ValidationError: if required parameters are not supplied """ - payload: RecursiveRelatedBundlesRequestPayload = cast( - RecursiveRelatedBundlesRequestPayload, flask.request.get_json() + try: + request_payload = RecursiveRelatedBundlesPydanticModel.model_validate( + flask.request.get_json(), strict=True, + ) + except ValidationError as e: + # If the JSON data doesn't match the Pydantic model, return a 400 Bad Request response + return flask.jsonify({'Error parsing data': str(e)}), 400 + + request = RequestRecursiveRelatedBundles.from_json_replacement( + payload=request_payload, + build_tags_allowed=True, ) - if not isinstance(payload, dict): - raise ValidationError('The input data must be a JSON object') - request = RequestRecursiveRelatedBundles.from_json(payload) db.session.add(request) db.session.commit() messaging.send_message_for_state_change(request, new_batch_msg=True) args = [ - payload['parent_bundle_image'], - payload.get('organization'), + request_payload, request.id, - payload.get('registry_auths'), ] - safe_args = 
_get_safe_args(args, payload) error_callback = failed_request_callback.s(request.id) try: handle_recursive_related_bundles_request.apply_async( args=args, link_error=error_callback, - argsrepr=repr(safe_args), queue=_get_user_queue(), ) except kombu.exceptions.OperationalError: @@ -1264,38 +1143,38 @@ def fbc_operations() -> Tuple[flask.Response, int]: :rtype: flask.Response :raise ValidationError: if required parameters are not supplied """ - payload: FbcOperationRequestPayload = flask.request.get_json() - if not isinstance(payload, dict): - raise ValidationError('The input data must be a JSON object') + try: + request_payload = FbcOperationsPydanticModel.model_validate( + flask.request.get_json(), strict=True, + ) + except ValidationError as e: + # If the JSON data doesn't match the Pydantic model, return a 400 Bad Request response + return flask.jsonify({'Error parsing data': str(e)}), 400 + + request = RequestFbcOperations.from_json_replacement( + payload=request_payload, + batch_disabled=True, + build_tags_allowed=True, + ) - request = RequestFbcOperations.from_json(payload) db.session.add(request) db.session.commit() messaging.send_message_for_state_change(request, new_batch_msg=True) - overwrite_from_index = payload.get('overwrite_from_index', False) from_index_pull_spec = request.from_index.pull_specification if request.from_index else None celery_queue = _get_user_queue( - serial=overwrite_from_index, from_index_pull_spec=from_index_pull_spec + serial=request_payload.overwrite_from_index, from_index_pull_spec=from_index_pull_spec ) args = [ - request.id, - payload['fbc_fragment'], - payload['from_index'], - payload.get('binary_image'), - payload.get('distribution_scope'), - payload.get('overwrite_from_index'), - payload.get('overwrite_from_index_token'), - payload.get('build_tags'), - payload.get('add_arches'), + request_payload, + request_payload, flask.current_app.config['IIB_BINARY_IMAGE_CONFIG'], ] - safe_args = _get_safe_args(args, payload) error_callback = failed_request_callback.s(request.id) try: handle_fbc_operation_request.apply_async( - args=args, link_error=error_callback, argsrepr=repr(safe_args), queue=celery_queue + args=args, link_error=error_callback, queue=celery_queue ) except kombu.exceptions.OperationalError: handle_broker_error(request) diff --git a/iib/web/iib_static_types.py b/iib/web/iib_static_types.py index b72520d8..f4682781 100644 --- a/iib/web/iib_static_types.py +++ b/iib/web/iib_static_types.py @@ -35,207 +35,6 @@ class RelatedBundlesMetadata(TypedDict): url: str -# Start of the Payloads Part - -# try inheritance from other payloads - -PayloadTags = Literal[ - 'AddRequestPayload', - 'RmRequestPayload', - 'RegenerateBundlePayload', - 'RegenerateBundleBatchPayload', - 'AddRmBatchPayload', - 'MergeIndexImagesPayload', - 'CreateEmptyIndexPayload', - 'FbcOperationRequestPayload', -] - - -PossiblePayloadParameters = Sequence[ - Literal[ - 'add_arches', - 'annotations', - 'batch', - 'binary_image', - 'build_requests', - 'build_tags', - 'bundles', - 'cnr_token', - 'check_related_images', - 'deprecation_list', - 'distribution_scope', - 'force_backport', - 'from_bundle_image', - 'from_index', - 'graph_update_mode', - 'labels', - 'operators', - 'organization', - 'output_fbc', - 'overwrite_from_index', - 'overwrite_from_index_token', - 'registry_auths', - 'related_bundles', - 'source_from_index', - 'target_index', - 'user', - ] -] - - -class AddRequestPayload(TypedDict): - """Datastructure of the request to /builds/add API point.""" - - add_arches: 
NotRequired[List[str]] - binary_image: NotRequired[str] - build_tags: NotRequired[List[str]] - bundles: List[str] - cnr_token: NotRequired[str] - check_related_images: NotRequired[bool] - deprecation_list: NotRequired[List[str]] - distribution_scope: NotRequired[str] - force_backport: NotRequired[bool] - from_index: NotRequired[str] - graph_update_mode: NotRequired[GRAPH_MODE_LITERAL] - organization: NotRequired[str] - overwrite_from_index: NotRequired[bool] - overwrite_from_index_token: NotRequired[str] - - -class RmRequestPayload(TypedDict): - """Datastructure of the request to /builds/rm API point.""" - - add_arches: NotRequired[List[str]] - binary_image: NotRequired[str] - build_tags: NotRequired[List[str]] - distribution_scope: NotRequired[str] - from_index: str - operators: List[str] - overwrite_from_index: NotRequired[bool] - overwrite_from_index_token: Optional[str] - - -class FbcOperationRequestPayload(TypedDict): - """Datastructure of the request to /builds/fbc-operation API point.""" - - fbc_fragment: str - from_index: str - binary_image: NotRequired[str] - build_tags: NotRequired[List[str]] - add_arches: NotRequired[List[str]] - overwrite_from_index: NotRequired[bool] - overwrite_from_index_token: NotRequired[str] - batch: NotRequired[str] - distribution_scope: NotRequired[str] - user: NotRequired[str] - - -class RegenerateBundlePayload(TypedDict): - """Datastructure of the request to /builds/regenerate-bundle API point.""" - - from_bundle_image: str - organization: NotRequired[str] - registry_auths: NotRequired[Dict[str, Any]] - related_bundles: NotRequired[RelatedBundlesMetadata] - user: NotRequired[str] - batch: NotRequired[str] - - -class RegenerateBundleBatchPayload(TypedDict): - """Datastructure of the request to /builds/regenerate-bundle-batch API point.""" - - annotations: NotRequired[Dict[str, Any]] - build_requests: List[RegenerateBundlePayload] - - -class AddRmBatchPayload(TypedDict): - """Datastructure of the request to /builds/add-rm-batch API point.""" - - annotations: NotRequired[Dict[str, Any]] - build_requests: List[Union[AddRequestPayload, RmRequestPayload]] - - -class MergeIndexImagesPayload(TypedDict): - """Datastructure of the request to /builds/merge-index-image API point.""" - - binary_image: NotRequired[str] - build_tags: NotRequired[List[str]] - deprecation_list: NotRequired[List[str]] - distribution_scope: NotRequired[str] - graph_update_mode: NotRequired[GRAPH_MODE_LITERAL] - overwrite_target_index: NotRequired[bool] - overwrite_target_index_token: NotRequired[str] - source_from_index: str - target_index: NotRequired[str] - batch: NotRequired[str] - user: NotRequired[str] - - -class CreateEmptyIndexPayload(TypedDict): - """Datastructure of the request to /builds/create-empty-index API point.""" - - binary_image: NotRequired[str] - from_index: str - labels: NotRequired[Dict[str, str]] - output_fbc: NotRequired[bool] - - -class RecursiveRelatedBundlesRequestPayload(TypedDict): - """Datastructure of the request to /builds/recursive-related-bundles API point.""" - - batch: NotRequired[int] - organization: NotRequired[str] - parent_bundle_image: str - registry_auths: NotRequired[Dict[str, Any]] - user: NotRequired[str] - - -class RequestPayload(TypedDict): - """Datastructure with all the possible keys that can API points receive.""" - - add_arches: NotRequired[List[str]] - annotations: NotRequired[Dict[str, Any]] - batch: NotRequired[int] - binary_image: NotRequired[str] - build_requests: NotRequired[ - List[Union[AddRequestPayload, RmRequestPayload, 
RegenerateBundlePayload]] - ] - build_tags: NotRequired[List[str]] - bundles: NotRequired[Optional[List[str]]] - cnr_token: NotRequired[str] - check_related_images: NotRequired[bool] - deprecation_list: NotRequired[List[str]] - distribution_scope: NotRequired[str] - fbc_fragment: NotRequired[bool] - force_backport: NotRequired[bool] - from_bundle_image: NotRequired[str] - from_index: NotRequired[str] - labels: NotRequired[Dict[str, str]] - operators: NotRequired[List[str]] - organization: NotRequired[str] - output_fbc: NotRequired[bool] - overwrite_from_index: NotRequired[bool] - overwrite_from_index_token: NotRequired[str] - overwrite_target_index: NotRequired[bool] - overwrite_target_index_token: NotRequired[str] - registry_auths: NotRequired[Dict[str, Any]] - related_bundles: NotRequired[RelatedBundlesMetadata] - source_from_index: NotRequired[str] - target_index: NotRequired[str] - user: NotRequired[str] - - -PayloadTypesUnion = Union[ - AddRequestPayload, - CreateEmptyIndexPayload, - FbcOperationRequestPayload, - MergeIndexImagesPayload, - RecursiveRelatedBundlesRequestPayload, - RegenerateBundlePayload, - RmRequestPayload, -] - -# End of the Payloads Part # Start of the RequestResponses Part diff --git a/iib/web/models.py b/iib/web/models.py index b6d36b37..4f97223d 100644 --- a/iib/web/models.py +++ b/iib/web/models.py @@ -18,30 +18,21 @@ from iib.exceptions import ValidationError from iib.web import db - +from iib.common.pydantic_models import ( + UnionPydanticRequestType, +) from iib.web.iib_static_types import ( - AddRequestPayload, AddRequestResponse, - AddRmBatchPayload, AddRmRequestResponseBase, BaseClassRequestResponse, BuildRequestState, CommonIndexImageResponseBase, - CreateEmptyIndexPayload, CreateEmptyIndexRequestResponse, + FbcOperationRequestResponse, MergeIndexImageRequestResponse, - MergeIndexImagesPayload, - RequestPayload, - PayloadTypesUnion, - RecursiveRelatedBundlesRequestPayload, RecursiveRelatedBundlesRequestResponse, - RegenerateBundleBatchPayload, - RegenerateBundlePayload, RegenerateBundleRequestResponse, - RmRequestPayload, - FbcOperationRequestPayload, - FbcOperationRequestResponse, ) @@ -413,6 +404,7 @@ class Request(db.Model): 'polymorphic_on': 'type', } + @validates('type') def validate_type(self, key: Optional[str], type_num: int) -> int: """ @@ -493,21 +485,65 @@ def add_architecture(self, arch_name: str) -> None: if arch not in self.architectures: self.architectures.append(arch) - @abstractmethod - def from_json( + @classmethod + def from_json_replacement( cls, - kwargs: PayloadTypesUnion, - ) -> Request: + payload: UnionPydanticRequestType, + batch: Optional[Batch] = None, + build_tags_allowed: Optional[bool] = False, + ): """ - Handle JSON requests for a request API endpoint. + Handle JSON requests for the builds/* API endpoint. - Child classes MUST override this method. - - :param PayloadTypesUnion kwargs: the user provided parameters to create a Request - :return: an object representation of the request - :retype: Request + :param UnionPydanticRequestType payload: the Pydantic model representing the request. + :param Batch batch: the batch to specify with the request. 
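+        :param bool build_tags_allowed: when true, the payload's build_tags are applied to the
+            created request.
+        :return: an object representation of the request
+        :rtype: Request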
""" - raise NotImplementedError('{} does not implement from_json'.format(cls.__name__)) + + keys_to_check = payload.get_keys_to_check_in_db() + for key in keys_to_check: + if key in [ + 'binary_image', + 'fbc_fragment', + 'from_index', + 'from_bundle_image', + 'source_from_index', + 'target_index', + 'parent_bundle_image', + ]: + payload.__setattr__(key, Image.get_or_create(pull_specification=payload.__getattribute__(key))) + + elif key in ["bundles", "deprecation_list"]: + payload.__setattr__(key, [ + Image.get_or_create(pull_specification=image) for image in payload.__getattribute__(key) + ]) + + elif key == "operators": + payload.__setattr__(key, [Operator.get_or_create(name=item) for item in payload.__getattribute__(key)]) + + else: + raise ValidationError(f"Unexpected key: {key} during from_json() method.") + + request_kwargs = payload.get_json_for_request() + + # current_user.is_authenticated is only ever False when auth is disabled + if current_user.is_authenticated: + request_kwargs['user'] = current_user + + # Add the request to a new batch + + batch = batch or Batch() + db.session.add(batch) + request_kwargs['batch'] = batch + + request = cls(**request_kwargs) + + if build_tags_allowed: + for bt in payload.build_tags: + request.add_build_tag(bt) + + request.add_state('in_progress', 'The request was initiated') + return request + # return value is BaseClassRequestResponse, however because of LSP, we need other types here too def to_json( @@ -627,31 +663,6 @@ def annotations(self, annotations: Optional[Dict[str, Any]]) -> None: json.dumps(annotations, sort_keys=True) if annotations is not None else None ) - @staticmethod - def validate_batch_request_params( - payload: Union[AddRmBatchPayload, RegenerateBundleBatchPayload] - ) -> None: - """ - Validate batch specific parameters from the input JSON payload. - - The requests in the "build_requests" key's value are not validated. Those should be - validated separately. - - :raises ValidationError: if the payload is invalid - """ - if ( - not isinstance(payload, dict) - or not isinstance(payload.get('build_requests'), list) - or not payload['build_requests'] - ): - raise ValidationError( - 'The input data must be a JSON object and the "build_requests" value must be a ' - 'non-empty array' - ) - - if not isinstance(payload.get('annotations', {}), dict): - raise ValidationError('The value of "annotations" must be a JSON object') - @property def state(self) -> str: """ @@ -798,30 +809,6 @@ def get_request_query_options(verbose: Optional[bool] = False) -> List[_Abstract return query_options -def validate_graph_mode(graph_update_mode: Optional[str], index_image: Optional[str]): - """ - Validate graph mode and check if index image is allowed to use different graph mode. 
- - :param str graph_update_mode: one of the graph mode options - :param str index_image: pullspec of index image to which graph mode should be applied to - :raises: ValidationError when incorrect graph_update_mode is set - :raises: Forbidden when graph_mode can't be used for given index image - - """ - if graph_update_mode: - graph_mode_options = current_app.config['IIB_GRAPH_MODE_OPTIONS'] - if graph_update_mode not in graph_mode_options: - raise ValidationError( - f'"graph_update_mode" must be set to one of these: {graph_mode_options}' - ) - allowed_from_indexes: List[str] = current_app.config['IIB_GRAPH_MODE_INDEX_ALLOW_LIST'] - if index_image not in allowed_from_indexes: - raise Forbidden( - '"graph_update_mode" can only be used on the' - f' following index image: {allowed_from_indexes}' - ) - - class RequestIndexImageMixin: """ A class for shared functionality between index image requests. @@ -919,111 +906,6 @@ def distribution_scope(cls: DefaultMeta) -> Mapped[str]: """Return the distribution_scope for the request.""" return db.mapped_column(db.String, nullable=True) - # Union for request_kwargs would require exhausting checking of the request_kwargs in the method - @staticmethod - def _from_json( - request_kwargs: RequestPayload, - additional_required_params: Optional[List[str]] = None, - additional_optional_params: Optional[List[str]] = None, - batch: Optional[Batch] = None, - ) -> None: - """ - Validate and process request agnostic parameters. - - As part of the processing, the input ``request_kwargs`` parameter - is updated to reference database objects where appropriate. - - :param dict request_kwargs: copy of args provided in API request - :param Batch batch: the batch to specify with the request. If one is not specified, one will - be created automatically. 
- """ - # Validate all required parameters are present - required_params = set(additional_required_params or []) - optional_params = { - 'add_arches', - 'binary_image', - 'overwrite_from_index', - 'overwrite_from_index_token', - 'distribution_scope', - 'build_tags', - 'output_fbc', - } | set(additional_optional_params or []) - - validate_request_params( - request_kwargs, required_params=required_params, optional_params=optional_params - ) - - # Check if both `from_index` and `add_arches` are not specified - if not request_kwargs.get('from_index') and not request_kwargs.get('add_arches'): - raise ValidationError('One of "from_index" or "add_arches" must be specified') - - # Verify that `overwrite_from_index` is the correct type - overwrite = request_kwargs.pop('overwrite_from_index', False) - if not isinstance(overwrite, bool): - raise ValidationError('The "overwrite_from_index" parameter must be a boolean') - - # Verify that `overwrite_from_index_token` is the correct type - overwrite_token = request_kwargs.pop('overwrite_from_index_token', None) - if overwrite_token: - if not isinstance(overwrite_token, str): - raise ValidationError('The "overwrite_from_index_token" parameter must be a string') - if overwrite_token and not overwrite: - raise ValidationError( - 'The "overwrite_from_index" parameter is required when' - ' the "overwrite_from_index_token" parameter is used' - ) - - distribution_scope = request_kwargs.pop('distribution_scope', None) - if distribution_scope: - distribution_scope = distribution_scope.lower() - if distribution_scope not in ['prod', 'stage', 'dev']: - raise ValidationError( - 'The "distribution_scope" value must be one of "dev", "stage", or "prod"' - ) - request_kwargs['distribution_scope'] = distribution_scope - - # Prevent duplicated items in "deprecation_list" - deprecation_list = request_kwargs.pop('deprecation_list', None) - if deprecation_list: - request_kwargs['deprecation_list'] = list(set(deprecation_list)) - - # Verify the user is authorized to use overwrite_from_index - # current_user.is_authenticated is only ever False when auth is disabled - if current_user.is_authenticated: - if overwrite and not overwrite_token: - raise Forbidden( - 'You must set "overwrite_from_index_token" to use "overwrite_from_index"' - ) - - # Validate add_arches are correctly provided - add_arches = request_kwargs.pop('add_arches', []) - Architecture.validate_architecture_json(add_arches) - - # Validate binary_image is correctly provided - binary_image = request_kwargs.pop('binary_image', None) - if binary_image is not None and not isinstance(binary_image, str): - raise ValidationError('The "binary_image" value must be a string') - elif not binary_image and not current_app.config['IIB_BINARY_IMAGE_CONFIG']: - raise ValidationError('The "binary_image" value must be a non-empty string') - - if binary_image: - request_kwargs['binary_image'] = Image.get_or_create(pull_specification=binary_image) - - if 'from_index' in request_kwargs: - if not isinstance(request_kwargs['from_index'], str): - raise ValidationError('"from_index" must be a string') - request_kwargs['from_index'] = Image.get_or_create( - pull_specification=request_kwargs['from_index'] - ) - - # current_user.is_authenticated is only ever False when auth is disabled - if current_user.is_authenticated: - request_kwargs['user'] = current_user - - # Add the request to a new batch - batch = batch or Batch() - db.session.add(batch) - request_kwargs['batch'] = batch def get_common_index_image_json(self) -> 
CommonIndexImageResponseBase: """ @@ -1105,101 +987,6 @@ class RequestAdd(Request, RequestIndexImageMixin): __mapper_args__ = {'polymorphic_identity': RequestTypeMapping.__members__['add'].value} - @classmethod - def from_json( # type: ignore[override] # noqa: F821 - cls, - kwargs: AddRequestPayload, - batch: Optional[Batch] = None, - ) -> RequestAdd: - """ - Handle JSON requests for the Add API endpoint. - - :param dict kwargs: the JSON payload of the request. - :param Batch batch: the batch to specify with the request. - """ - request_kwargs = deepcopy(kwargs) - - for key in ('bundles', 'deprecation_list'): - value = request_kwargs.get(key, []) - if not isinstance(value, list) or any( - not item or not isinstance(item, str) for item in value - ): - raise ValidationError( - f'"{key}" should be either an empty array or an array of non-empty strings' - ) - - # Check if no bundles and `from_index is specified - # if no bundles and no from index then an empty index will be created which is a no-op - if not (request_kwargs.get('bundles') or request_kwargs.get('from_index')): - raise ValidationError('"from_index" must be specified if no bundles are specified') - - # Verify that `check_related_images` is specified when bundles are specified - if request_kwargs.get('check_related_images') and not request_kwargs.get('bundles'): - raise ValidationError( - '"check_related_images" must be specified only when bundles are specified' - ) - - # Verify that `check_related_images` is the correct type - check_related_images = request_kwargs.get('check_related_images', False) - if not isinstance(check_related_images, bool): - raise ValidationError('The "check_related_images" parameter must be a boolean') - - ALLOWED_KEYS_1: Sequence[Literal['cnr_token', 'graph_update_mode', 'organization']] = ( - 'cnr_token', - 'graph_update_mode', - 'organization', - ) - for param in ALLOWED_KEYS_1: - if param not in request_kwargs: - continue - - if not isinstance(request_kwargs[param], str): - raise ValidationError(f'"{param}" must be a string') - - if param == 'graph_update_mode': - validate_graph_mode(request_kwargs[param], request_kwargs.get('from_index')) - - if not isinstance(request_kwargs.get('force_backport', False), bool): - raise ValidationError('"force_backport" must be a boolean') - - # Remove attributes that are not stored in the database - request_kwargs.pop('cnr_token', None) - request_kwargs.pop('force_backport', None) - - # cast to more wider type, see _from_json method - cls._from_json( - cast(RequestPayload, request_kwargs), - additional_optional_params=[ - 'from_index', - 'organization', - 'bundles', - 'distribution_scope', - 'deprecation_list', - 'graph_update_mode', - 'build_tags', - 'check_related_images', - ], - batch=batch, - ) - - ALLOWED_KEYS_2: Sequence[Literal['bundles', 'deprecation_list']] = ( - 'bundles', - 'deprecation_list', - ) - for key in ALLOWED_KEYS_2: - request_kwargs[key] = [ - Image.get_or_create(pull_specification=item) - for item in request_kwargs.get(key, []) # type: ignore - ] - build_tags = request_kwargs.pop('build_tags', []) - request = cls(**request_kwargs) - - for bt in build_tags: - request.add_build_tag(bt) - - request.add_state('in_progress', 'The request was initiated') - return request - def to_json(self, verbose: Optional[bool] = True) -> AddRequestResponse: """ Provide the JSON representation of an "add" build request. 
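For orientation only, a minimal sketch of how the shared from_json_replacement() constructor replaces the per-class from_json() methods removed in the hunks below. The example dictionary and pull specifications are hypothetical, and the snippet assumes it runs inside a Flask application and request context so that db, current_app and current_user are available:

    from pydantic import ValidationError as PydanticValidationError

    from iib.common.pydantic_models import AddPydanticModel
    from iib.web.models import RequestAdd

    # Hypothetical input; any valid pull specifications would do.
    example_json = {
        'bundles': ['registry.example.com/namespace/bundle:v1.0.0'],
        'from_index': 'registry.example.com/namespace/index:v4.15',
    }

    try:
        payload = AddPydanticModel.model_validate(example_json, strict=True)
    except PydanticValidationError:
        # The endpoints above turn this into a 400 "Error parsing data" response.
        raise

    # Pull specifications are swapped for Image/Operator rows, the batch and the
    # authenticated user are attached, build tags are applied and the initial
    # 'in_progress' state is recorded.
    request = RequestAdd.from_json_replacement(payload=payload, build_tags_allowed=True)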
@@ -1259,46 +1046,6 @@ class RequestRm(Request, RequestIndexImageMixin): __mapper_args__ = {'polymorphic_identity': RequestTypeMapping.__members__['rm'].value} - @classmethod - def from_json( # type: ignore[override] # noqa: F821 - cls, - kwargs: RmRequestPayload, - batch: Optional[Batch] = None, - ) -> RequestRm: - """ - Handle JSON requests for the Remove API endpoint. - - :param dict kwargs: the JSON payload of the request. - :param Batch batch: the batch to specify with the request. - """ - request_kwargs = deepcopy(kwargs) - - operators = request_kwargs.get('operators', []) - if ( - not isinstance(operators, list) - or len(operators) == 0 - or any(not item or not isinstance(item, str) for item in operators) - ): - raise ValidationError('"operators" should be a non-empty array of strings') - - # cast to more wider type, see _from_json method - cls._from_json( - cast(RequestPayload, request_kwargs), - additional_required_params=['operators', 'from_index'], - batch=batch, - ) - - request_kwargs['operators'] = [Operator.get_or_create(name=item) for item in operators] - - build_tags = request_kwargs.pop('build_tags', []) - request = cls(**request_kwargs) - request.add_state('in_progress', 'The request was initiated') - - for bt in build_tags: - request.add_build_tag(bt) - - return request - def to_json(self, verbose: Optional[bool] = True) -> AddRmRequestResponseBase: """ Provide the JSON representation of an "rm" build request. @@ -1381,70 +1128,6 @@ def bundle_replacements(self, bundle_replacements: Dict[str, str]) -> None: json.dumps(bundle_replacements, sort_keys=True) if bundle_replacements else None ) - @classmethod - def from_json( # type: ignore[override] # noqa: F821 - cls, - kwargs: RegenerateBundlePayload, - batch: Optional[Batch] = None, - ) -> RequestRegenerateBundle: - """ - Handle JSON requests for the Regenerate Bundle API endpoint. - - :param dict kwargs: the JSON payload of the request. - :param Batch batch: the batch to specify with the request. If one is not specified, one will - be created automatically. 
- """ - batch = batch or Batch() - request_kwargs = deepcopy(kwargs) - - validate_request_params( - request_kwargs, - required_params={'from_bundle_image'}, - optional_params={'bundle_replacements', 'organization', 'registry_auths'}, - ) - # Validate bundle_replacements is correctly provided - bundle_replacements = request_kwargs.get('bundle_replacements', {}) - if bundle_replacements: - if not isinstance(bundle_replacements, dict): - raise ValidationError('The value of "bundle_replacements" must be a JSON object') - - for key, value in bundle_replacements.items(): - if not isinstance(value, str) or not isinstance(key, str): - raise ValidationError(f'The key and value of "{key}" must be a string') - - # Validate organization is correctly provided - organization = request_kwargs.get('organization') - if organization and not isinstance(organization, str): - raise ValidationError('"organization" must be a string') - - # Validate from_bundle_image is correctly provided - from_bundle_image = request_kwargs.get('from_bundle_image') - if not isinstance(from_bundle_image, str): - raise ValidationError('"from_bundle_image" must be a string') - - # Remove attributes that are not stored in the database - registry_auths = request_kwargs.pop('registry_auths', None) - - # Check that registry_auths were provided in valid format - if registry_auths: - validate_registry_auths(registry_auths) - - request_kwargs['from_bundle_image'] = Image.get_or_create( - pull_specification=from_bundle_image - ) - - # current_user.is_authenticated is only ever False when auth is disabled - if current_user.is_authenticated: - request_kwargs['user'] = current_user - - # Add the request to a new batch - db.session.add(batch) - request_kwargs['batch'] = batch - - request = cls(**request_kwargs) - request.add_state('in_progress', 'The request was initiated') - return request - def to_json(self, verbose: Optional[bool] = True) -> RegenerateBundleRequestResponse: """ Provide the JSON representation of a "regenerate-bundle" build request. @@ -1540,114 +1223,6 @@ class RequestMergeIndexImage(Request): 'polymorphic_identity': RequestTypeMapping.__members__['merge_index_image'].value } - @classmethod - def from_json( # type: ignore[override] # noqa: F821 - cls, - kwargs: MergeIndexImagesPayload, - batch: Optional[Batch] = None, - ) -> RequestMergeIndexImage: - """ - Handle JSON requests for the merge-index-image API endpoint. - - :param dict kwargs: the JSON payload of the request. - :param Batch batch: the batch to specify with the request. 
- """ - request_kwargs = deepcopy(kwargs) - - deprecation_list = request_kwargs.pop('deprecation_list', []) - if not isinstance(deprecation_list, list) or any( - not item or not isinstance(item, str) for item in deprecation_list - ): - raise ValidationError( - 'The "deprecation_list" value should be an empty array or an array of strings' - ) - - request_kwargs['deprecation_list'] = [ - Image.get_or_create(pull_specification=item) for item in deprecation_list - ] - - source_from_index = request_kwargs.get('source_from_index', None) - if not (isinstance(source_from_index, str) and source_from_index): - raise ValidationError('The "source_from_index" value must be a string') - request_kwargs['source_from_index'] = Image.get_or_create( - pull_specification=source_from_index - ) - - graph_update_mode = request_kwargs.get('graph_update_mode') - validate_graph_mode(graph_update_mode, request_kwargs.get('target_index')) - - target_index = request_kwargs.pop('target_index', None) - if target_index: - if not isinstance(target_index, str): - raise ValidationError('The "target_index" value must be a string') - request_kwargs['target_index'] = Image.get_or_create(pull_specification=target_index) - - # Verify that `overwrite_target_index` is the correct type - overwrite = request_kwargs.pop('overwrite_target_index', False) - if not isinstance(overwrite, bool): - raise ValidationError('The "overwrite_target_index" value must be a boolean') - - # Verify that `overwrite_target_index_token` is the correct type - overwrite_token = request_kwargs.pop('overwrite_target_index_token', None) - if overwrite_token: - if not isinstance(overwrite_token, str): - raise ValidationError('The "overwrite_target_index_token" value must be a string') - if overwrite_token and not overwrite: - raise ValidationError( - 'The "overwrite_target_index" value is required when' - ' the "overwrite_target_index_token" value is used' - ) - elif overwrite: - raise ValidationError( - 'The "overwrite_target_index_token" value is required when' - ' the "overwrite_target_index" value is set' - ) - - # Validate binary_image is correctly provided - binary_image = request_kwargs.pop('binary_image', None) - if binary_image is not None and not isinstance(binary_image, str): - raise ValidationError('The "binary_image" value must be a string') - elif not binary_image and not current_app.config['IIB_BINARY_IMAGE_CONFIG']: - raise ValidationError('The "binary_image" value must be a non-empty string') - - if binary_image: - request_kwargs['binary_image'] = Image.get_or_create(pull_specification=binary_image) - - distribution_scope = request_kwargs.pop('distribution_scope', None) - if distribution_scope: - distribution_scope = distribution_scope.lower() - if distribution_scope not in ['prod', 'stage', 'dev']: - raise ValidationError( - 'The "distribution_scope" value must be one of "dev", "stage", or "prod"' - ) - request_kwargs['distribution_scope'] = distribution_scope - - if not isinstance(request_kwargs.get('build_tags', []), list) or any( - not item or not isinstance(item, str) for item in request_kwargs.get('build_tags', []) - ): - raise ValidationError( - '"build_tags" should be either an empty array or an array of non-empty strings' - ) - - # current_user.is_authenticated is only ever False when auth is disabled - if current_user.is_authenticated: - request_kwargs['user'] = current_user - - # Add the request to a new batch - batch = batch or Batch() - db.session.add(batch) - request_kwargs['batch'] = batch - - request = cls(**request_kwargs) - - 
build_tags = request_kwargs.pop('build_tags', []) - - for bt in build_tags: - request.add_build_tag(bt) - - request.add_state('in_progress', 'The request was initiated') - return request - def to_json(self, verbose: Optional[bool] = True) -> MergeIndexImageRequestResponse: """ Provide the JSON representation of an "merge-index-image" build request. @@ -1770,78 +1345,6 @@ def get_or_create(cls, username: str) -> User: return user -def validate_request_params( - request_params: Union[RequestPayload, PayloadTypesUnion], - required_params: Set[str], - optional_params: Set[str], -) -> None: - """ - Validate parameters for a build request. - - All required parameters must be set in the request_params and - unknown parameters are not allowed. - - :param Union[RequestPayload, PayloadTypesUnion] request_params: the request parameters - provided by the user - :param set required_params: the set of required parameters - :param set optional_params: the set of optional parameters - :raises iib.exceptions.ValidationError: if validation of parameters fails - """ - missing_params = required_params - request_params.keys() - if missing_params: - raise ValidationError('Missing required parameter(s): {}'.format(', '.join(missing_params))) - - # Don't allow the user to set arbitrary columns or relationships - invalid_params = request_params.keys() - required_params - optional_params - if invalid_params: - raise ValidationError( - 'The following parameters are invalid: {}'.format(', '.join(invalid_params)) - ) - - # Verify that all the required parameters are set and not empty - for param in required_params: - if not request_params.get(param): - raise ValidationError(f'"{param}" must be set') - - # If any optional parameters are set but are empty, just remove them since they are - # treated as null values - for param in optional_params: - if ( - param in request_params - and not isinstance(request_params.get(param), bool) - and not request_params[param] # type: ignore - ): - del request_params[param] # type: ignore - - -def validate_registry_auths(registry_auths: Dict[str, Any]) -> None: - """ - Validate registry_auths for a build request. - - Only auth item in dockerconfig.json is supported for iib. - - :param dict registry_auths: User provided dockerconfig for authentication - to private registries - :raises ValidationError: if registry_auths are not in valid format - """ - auths = 'auths' - if not isinstance(registry_auths, dict): - raise ValidationError('"registry_auths" must be a dict') - if list(registry_auths.keys()) != [auths]: - raise ValidationError(f'"registry_auths" must contain single key "{auths}"') - if not registry_auths[auths] or not isinstance(registry_auths[auths], dict): - raise ValidationError(f'"registry_auths.{auths}" must be a non-empty dict') - for reg, auth_dict in registry_auths[auths].items(): - err_msg = ( - f'{reg} in registry_auths has auth value in incorrect format. 
' - 'See the API docs for details on the expected format' - ) - if not isinstance(auth_dict, dict) or len(auth_dict) != 1: - raise ValidationError(err_msg) - if not all(k == 'auth' and isinstance(v, str) for (k, v) in auth_dict.items()): - raise ValidationError(err_msg) - - class RequestCreateEmptyIndex(Request, RequestIndexImageMixin): """An "create-empty-index" image build request.""" @@ -1874,64 +1377,6 @@ def labels(self, labels: Optional[Dict[str, Any]]) -> None: """ self._labels = json.dumps(labels, sort_keys=True) if labels is not None else None - @classmethod - def from_json( # type: ignore[override] # noqa: F821 - cls, - kwargs: CreateEmptyIndexPayload, - batch: Optional[Batch] = None, - ) -> RequestCreateEmptyIndex: - """ - Handle JSON requests for the create-empty-index API endpoint. - - :param dict kwargs: the JSON payload of the request. - :param Batch batch: the batch to specify with the request. - """ - request_kwargs = deepcopy(kwargs) - if request_kwargs.get('from_index') is None: - raise ValidationError('"from_index" must be a specified') - if ( - not isinstance(request_kwargs.get('from_index'), str) - or len(str(request_kwargs.get('from_index'))) == 0 - ): - raise ValidationError('"from_index" must be a non-empty string') - if request_kwargs.get('output_fbc') and not isinstance( - request_kwargs.get('output_fbc'), bool - ): - raise ValidationError('"output_fbc" should be boolean') - - new_labels = request_kwargs.get('labels') - if new_labels is not None: - if not isinstance(new_labels, dict): - raise ValidationError('The value of "labels" must be a JSON object') - - for key, value in new_labels.items(): - if not isinstance(value, str) or not isinstance(key, str): - raise ValidationError(f'The key and value of "{key}" must be a string') - - for arg in ( - 'add_arches', - 'overwrite_from_index', - 'overwrite_from_index_token', - 'build_tags', - ): - if arg in request_kwargs: - raise ValidationError( - f'The "{arg}" arg is invalid for the create-empty-index endpoint.' - ) - - # cast to more wider type, see _from_json method - cls._from_json( - cast(RequestPayload, request_kwargs), - additional_required_params=['from_index'], - additional_optional_params=['labels'], - batch=batch, - ) - - request = cls(**request_kwargs) - request.add_state('in_progress', 'The request was initiated') - - return request - def to_json(self, verbose: Optional[bool] = True) -> CreateEmptyIndexRequestResponse: """ Provide the JSON representation of an "create-empty-index" build request. @@ -2001,61 +1446,6 @@ class RequestRecursiveRelatedBundles(Request): } build_tags = None - @classmethod - def from_json( # type: ignore[override] # noqa: F821 - cls, - kwargs: RecursiveRelatedBundlesRequestPayload, - batch: Optional[Batch] = None, - ): - """ - Handle JSON requests for the Recursive Related Bundles API endpoint. - - :param dict kwargs: the JSON payload of the request. - :param Batch batch: the batch to specify with the request. If one is not specified, one will - be created automatically. 
- """ - batch = batch or Batch() - request_kwargs = deepcopy(kwargs) - - validate_request_params( - request_kwargs, - required_params={'parent_bundle_image'}, - optional_params={'organization', 'registry_auths'}, - ) - - # Validate organization is correctly provided - organization = request_kwargs.get('organization') - if organization and not isinstance(organization, str): - raise ValidationError('"organization" must be a string') - - # Validate parent_bundle_image is correctly provided - parent_bundle_image = request_kwargs.get('parent_bundle_image') - if not isinstance(parent_bundle_image, str): - raise ValidationError('"parent_bundle_image" must be a string') - - # Remove attributes that are not stored in the database - registry_auths = request_kwargs.pop('registry_auths', None) - - # Check that registry_auths were provided in valid format - if registry_auths: - validate_registry_auths(registry_auths) - - request_kwargs['parent_bundle_image'] = Image.get_or_create( - pull_specification=parent_bundle_image - ) - - # current_user.is_authenticated is only ever False when auth is disabled - if current_user.is_authenticated: - request_kwargs['user'] = current_user - - # Add the request to a new batch - db.session.add(batch) - request_kwargs['batch'] = batch - - request = cls(**request_kwargs) - request.add_state('in_progress', 'The request was initiated') - return request - def to_json(self, verbose: Optional[bool] = True) -> RecursiveRelatedBundlesRequestResponse: """ Provide the JSON representation of a "recursive-related-bundles" build request. @@ -2113,57 +1503,6 @@ class RequestFbcOperations(Request, RequestIndexImageMixin): 'polymorphic_identity': RequestTypeMapping.__members__['fbc_operations'].value } - @classmethod - def from_json( # type: ignore[override] # noqa: F821 - cls, - kwargs: FbcOperationRequestPayload, - ): - """ - Handle JSON requests for the fbc-operations API endpoint. - - :param dict kwargs: the JSON payload of the request. - """ - request_kwargs = deepcopy(kwargs) - - validate_request_params( - request_kwargs, - required_params={'fbc_fragment', 'from_index'}, - optional_params={ - 'add_arches', - 'binary_image', - 'distribution_scope', - 'build_tags', - 'overwrite_from_index', - 'overwrite_from_index_token', - }, - ) - - # Validate parent_bundle_image is correctly provided - fbc_fragment = request_kwargs.get('fbc_fragment') - if not isinstance(fbc_fragment, str): - raise ValidationError('The "fbc_fragment" must be a string') - request_kwargs['fbc_fragment'] = Image.get_or_create(pull_specification=fbc_fragment) - - # cast to more wider type, see _from_json method - cls._from_json( - cast(RequestPayload, request_kwargs), - additional_optional_params=[ - 'bundles', - 'fbc_fragment', - 'from_index', - 'organization', - ], - ) - - build_tags = request_kwargs.pop('build_tags', []) - request = cls(**request_kwargs) - - for bt in build_tags: - request.add_build_tag(bt) - - request.add_state('in_progress', 'The request was initiated') - return request - def to_json(self, verbose: Optional[bool] = True) -> FbcOperationRequestResponse: """ Provide the JSON representation of a "fbc-operation" build request. 
diff --git a/iib/workers/tasks/build.py b/iib/workers/tasks/build.py index 1f275bd2..32a3f1b1 100644 --- a/iib/workers/tasks/build.py +++ b/iib/workers/tasks/build.py @@ -58,6 +58,7 @@ GreenwaveConfig, UpdateRequestPayload, ) +from iib.common.pydantic_models import AddPydanticModel, RmPydanticModel __all__ = ['handle_add_request', 'handle_rm_request'] @@ -832,24 +833,10 @@ def inspect_related_images(bundles: List[str], request_id) -> None: @app.task @request_logger def handle_add_request( - bundles: List[str], + payload: AddPydanticModel, request_id: int, - binary_image: Optional[str] = None, - from_index: Optional[str] = None, - add_arches: Optional[Set[str]] = None, - cnr_token: Optional[str] = None, - organization: Optional[str] = None, - force_backport: bool = False, - overwrite_from_index: bool = False, - overwrite_from_index_token: Optional[str] = None, - distribution_scope: Optional[str] = None, greenwave_config: Optional[GreenwaveConfig] = None, binary_image_config: Optional[Dict[str, Dict[str, str]]] = None, - deprecation_list: Optional[List[str]] = None, - build_tags: Optional[List[str]] = None, - graph_update_mode: Optional[str] = None, - check_related_images: bool = False, - traceparent: Optional[str] = None, ) -> None: """ Coordinate the the work needed to build the index image with the input bundles. @@ -893,10 +880,10 @@ def handle_add_request( # Resolve bundles to their digests set_request_state(request_id, 'in_progress', 'Resolving the bundles') - with set_registry_token(overwrite_from_index_token, from_index, append=True): - resolved_bundles = get_resolved_bundles(bundles) + with set_registry_token(payload.overwrite_from_index_token, payload.from_index, append=True): + resolved_bundles = get_resolved_bundles(payload.bundles) verify_labels(resolved_bundles) - if check_related_images: + if payload.check_related_images: inspect_related_images(resolved_bundles, request_id) # Check if Gating passes for all the bundles @@ -908,23 +895,23 @@ def handle_add_request( prebuild_info = prepare_request_for_build( request_id, RequestConfigAddRm( - _binary_image=binary_image, - from_index=from_index, - overwrite_from_index_token=overwrite_from_index_token, - add_arches=add_arches, - bundles=bundles, - distribution_scope=distribution_scope, + _binary_image=payload.binary_image, + from_index=payload.from_index, + overwrite_from_index_token=payload.overwrite_from_index_token, + add_arches=payload.add_arches, + bundles=payload.bundles, + distribution_scope=payload.distribution_scope, binary_image_config=binary_image_config, ), ) from_index_resolved = prebuild_info['from_index_resolved'] - with set_registry_token(overwrite_from_index_token, from_index_resolved): - is_fbc = is_image_fbc(from_index_resolved) if from_index else False + with set_registry_token(payload.overwrite_from_index_token, from_index_resolved): + is_fbc = is_image_fbc(from_index_resolved) if payload.from_index else False if is_fbc: # logging requested by stakeholders do not delete log.info("Processing File-Based Catalog image") - if (cnr_token and organization) or force_backport: + if (payload.cnr_token and payload.organization) or payload.force_backport: log.warning( "Legacy support is deprecated in IIB. " "cnr_token, organization and force_backport parameters will be ignored." 
@@ -934,12 +921,12 @@ def handle_add_request( present_bundles: List[BundleImage] = [] present_bundles_pull_spec: List[str] = [] with tempfile.TemporaryDirectory(prefix=f'iib-{request_id}-') as temp_dir: - if from_index: + if payload.from_index: msg = 'Checking if bundles are already present in index image' log.info(msg) set_request_state(request_id, 'in_progress', msg) - with set_registry_token(overwrite_from_index_token, from_index_resolved, append=True): + with set_registry_token(payload.overwrite_from_index_token, from_index_resolved, append=True): present_bundles, present_bundles_pull_spec = _get_present_bundles( from_index_resolved, temp_dir ) @@ -962,8 +949,8 @@ def handle_add_request( bundles=resolved_bundles, binary_image=prebuild_info['binary_image_resolved'], from_index=from_index_resolved, - graph_update_mode=graph_update_mode, - overwrite_from_index_token=overwrite_from_index_token, + graph_update_mode=payload.graph_update_mode, + overwrite_from_index_token=payload.overwrite_from_index_token, overwrite_csv=(prebuild_info['distribution_scope'] in ['dev', 'stage']), ) else: @@ -972,8 +959,8 @@ def handle_add_request( bundles=resolved_bundles, binary_image=prebuild_info['binary_image_resolved'], from_index=from_index_resolved, - graph_update_mode=graph_update_mode, - overwrite_from_index_token=overwrite_from_index_token, + graph_update_mode=payload.graph_update_mode, + overwrite_from_index_token=payload.overwrite_from_index_token, overwrite_csv=(prebuild_info['distribution_scope'] in ['dev', 'stage']), container_tool='podman', ) @@ -986,7 +973,7 @@ def handle_add_request( add_max_ocp_version_property(resolved_bundles, temp_dir) deprecation_bundles = get_bundles_from_deprecation_list( - present_bundles_pull_spec + resolved_bundles, deprecation_list or [] + present_bundles_pull_spec + resolved_bundles, payload.deprecation_list or [] ) arches = prebuild_info['arches'] @@ -1010,7 +997,7 @@ def handle_add_request( ) with set_registry_token( - overwrite_from_index_token, from_index_resolved, append=True + payload.overwrite_from_index_token, from_index_resolved, append=True ): deprecate_bundles( bundles=deprecation_bundles, @@ -1046,7 +1033,7 @@ def handle_add_request( ) # get catalog with opted-in operators os.makedirs(os.path.join(temp_dir, 'from_index'), exist_ok=True) - with set_registry_token(overwrite_from_index_token, from_index_resolved, append=True): + with set_registry_token(payload.overwrite_from_index_token, from_index_resolved, append=True): catalog_from_index = get_catalog_dir( from_index=from_index_resolved, base_dir=os.path.join(temp_dir, 'from_index') ) @@ -1099,15 +1086,15 @@ def handle_add_request( ) set_request_state(request_id, 'in_progress', 'Creating the manifest list') - output_pull_spec = _create_and_push_manifest_list(request_id, arches, build_tags) + output_pull_spec = _create_and_push_manifest_list(request_id, arches, payload.build_tags) _update_index_image_pull_spec( output_pull_spec, request_id, arches, - from_index, - overwrite_from_index, - overwrite_from_index_token, + payload.from_index, + payload.overwrite_from_index, + payload.overwrite_from_index_token, from_index_resolved, add_or_rm=True, ) @@ -1120,16 +1107,9 @@ def handle_add_request( @app.task @request_logger def handle_rm_request( - operators: List[str], + payload: RmPydanticModel, request_id: int, - from_index: str, - binary_image: Optional[str] = None, - add_arches: Optional[Set[str]] = None, - overwrite_from_index: bool = False, - overwrite_from_index_token: Optional[str] = None, - 
distribution_scope: Optional[str] = None, binary_image_config: Optional[Dict[str, Dict[str, str]]] = None, - build_tags: Optional[List[str]] = None, ) -> None: """ Coordinate the work needed to remove the input operators and rebuild the index image. @@ -1159,11 +1139,11 @@ def handle_rm_request( prebuild_info = prepare_request_for_build( request_id, RequestConfigAddRm( - _binary_image=binary_image, - from_index=from_index, - overwrite_from_index_token=overwrite_from_index_token, - add_arches=add_arches, - distribution_scope=distribution_scope, + _binary_image=payload.binary_image, + from_index=payload.from_index, + overwrite_from_index_token=payload.overwrite_from_index_token, + add_arches=payload.add_arches, + distribution_scope=payload.distribution_scope, binary_image_config=binary_image_config, ), ) @@ -1172,7 +1152,7 @@ def handle_rm_request( from_index_resolved = prebuild_info['from_index_resolved'] with tempfile.TemporaryDirectory(prefix=f'iib-{request_id}-') as temp_dir: - with set_registry_token(overwrite_from_index_token, from_index_resolved, append=True): + with set_registry_token(payload.overwrite_from_index_token, from_index_resolved, append=True): image_is_fbc = is_image_fbc(from_index_resolved) if image_is_fbc: @@ -1180,9 +1160,9 @@ def handle_rm_request( fbc_dir, _ = opm_registry_rm_fbc( base_dir=temp_dir, from_index=from_index_resolved, - operators=operators, + operators=payload.operators, binary_image=prebuild_info['binary_image'], - overwrite_from_index_token=overwrite_from_index_token, + overwrite_from_index_token=payload.overwrite_from_index_token, generate_cache=False, ) @@ -1193,12 +1173,12 @@ def handle_rm_request( os.makedirs(os.path.join(temp_dir, 'from_index'), exist_ok=True) # get catalog with opted-in operators - with set_registry_token(overwrite_from_index_token, from_index_resolved, append=True): + with set_registry_token(payload.overwrite_from_index_token, from_index_resolved, append=True): catalog_from_index = get_catalog_dir( from_index=from_index_resolved, base_dir=os.path.join(temp_dir, 'from_index') ) # remove operators from from_index file-based catalog - for operator in operators: + for operator in payload.operators: operator_path = os.path.join(catalog_from_index, operator) if os.path.exists(operator_path): log.debug('Removing operator from from_index FBC %s', operator_path) @@ -1223,10 +1203,10 @@ def handle_rm_request( else: _opm_index_rm( base_dir=temp_dir, - operators=operators, + operators=payload.operators, binary_image=prebuild_info['binary_image'], from_index=from_index_resolved, - overwrite_from_index_token=overwrite_from_index_token, + overwrite_from_index_token=payload.overwrite_from_index_token, container_tool='podman', ) @@ -1259,15 +1239,15 @@ def handle_rm_request( ) set_request_state(request_id, 'in_progress', 'Creating the manifest list') - output_pull_spec = _create_and_push_manifest_list(request_id, arches, build_tags) + output_pull_spec = _create_and_push_manifest_list(request_id, arches, payload.build_tags) _update_index_image_pull_spec( output_pull_spec, request_id, arches, - from_index, - overwrite_from_index, - overwrite_from_index_token, + payload.from_index, + payload.overwrite_from_index, + payload.overwrite_from_index_token, from_index_resolved, add_or_rm=True, ) diff --git a/iib/workers/tasks/build_create_empty_index.py b/iib/workers/tasks/build_create_empty_index.py index d6f0cfe0..85e3eb6c 100644 --- a/iib/workers/tasks/build_create_empty_index.py +++ b/iib/workers/tasks/build_create_empty_index.py @@ -27,6 +27,7 @@ 
grpcurl_get_db_data, ) from iib.workers.tasks.iib_static_types import PrebuildInfo +from iib.common.pydantic_models import CreateEmptyIndexPydanticModel __all__ = ['handle_create_empty_index_request'] @@ -61,11 +62,8 @@ def _get_present_operators(from_index: str, base_dir: str) -> List[str]: @app.task @request_logger def handle_create_empty_index_request( - from_index: str, + payload: CreateEmptyIndexPydanticModel, request_id: int, - output_fbc: bool = False, - binary_image: Optional[str] = None, - labels: Optional[Dict[str, str]] = None, binary_image_config: Optional[Dict[str, Dict[str, str]]] = None, ) -> None: """Coordinate the the work needed to create the index image with labels. @@ -84,15 +82,15 @@ def handle_create_empty_index_request( prebuild_info: PrebuildInfo = prepare_request_for_build( request_id, RequestConfigCreateIndexImage( - _binary_image=binary_image, - from_index=from_index, + _binary_image=payload.binary_image, + from_index=payload.from_index, binary_image_config=binary_image_config, ), ) from_index_resolved = prebuild_info['from_index_resolved'] - prebuild_info['labels'] = labels + prebuild_info['labels'] = payload.labels - if not output_fbc and is_image_fbc(from_index_resolved): + if not payload.output_fbc and is_image_fbc(from_index_resolved): log.debug('%s is FBC index image', from_index_resolved) err_msg = 'Cannot create SQLite index image from File-Based Catalog index image' log.error(err_msg) @@ -107,13 +105,13 @@ def handle_create_empty_index_request( # if output_fbc parameter is true, create an empty FBC index image # else create empty SQLite index image - if output_fbc: - log.debug('Creating empty FBC index image from %s', from_index) + if payload.output_fbc: + log.debug('Creating empty FBC index image from %s', payload.from_index) opm_create_empty_fbc( request_id=request_id, temp_dir=temp_dir, from_index_resolved=from_index_resolved, - from_index=from_index, + from_index=payload.from_index, binary_image=prebuild_info['binary_image'], operators=operators, ) @@ -136,8 +134,8 @@ def handle_create_empty_index_request( 'com.redhat.index.delivery.distribution_scope': prebuild_info['distribution_scope'], } - if labels: - iib_labels.update(labels) + if payload.labels: + iib_labels.update(payload.labels) for index_label, value in iib_labels.items(): _add_label_to_index(index_label, value, temp_dir, 'index.Dockerfile') @@ -154,7 +152,7 @@ def handle_create_empty_index_request( output_pull_spec=output_pull_spec, request_id=request_id, arches=arches, - from_index=from_index, + from_index=payload.from_index, resolved_prebuild_from_index=from_index_resolved, ) _cleanup() diff --git a/iib/workers/tasks/build_fbc_operations.py b/iib/workers/tasks/build_fbc_operations.py index 4a58234b..c2323559 100644 --- a/iib/workers/tasks/build_fbc_operations.py +++ b/iib/workers/tasks/build_fbc_operations.py @@ -1,7 +1,7 @@ # SPDX-License-Identifier: GPL-3.0-or-later import logging import tempfile -from typing import Dict, Optional, Set +from typing import Dict, Optional from iib.workers.api_utils import set_request_state from iib.workers.tasks.build import ( @@ -22,7 +22,7 @@ set_registry_token, RequestConfigFBCOperation, ) - +from iib.common.pydantic_models import FbcOperationsPydanticModel __all__ = ['handle_fbc_operation_request'] log = logging.getLogger(__name__) @@ -31,15 +31,8 @@ @app.task @request_logger def handle_fbc_operation_request( + payload: FbcOperationsPydanticModel, request_id: int, - fbc_fragment: str, - from_index: Optional[str] = None, - binary_image: Optional[str] 
= None, - distribution_scope: Optional[str] = None, - overwrite_from_index: bool = False, - overwrite_from_index_token: Optional[str] = None, - build_tags: Optional[Set[str]] = None, - add_arches: Optional[Set[str]] = None, binary_image_config: Optional[Dict[str, Dict[str, str]]] = None, ) -> None: """ @@ -58,18 +51,18 @@ def handle_fbc_operation_request( _cleanup() set_request_state(request_id, 'in_progress', 'Resolving the fbc fragment') - with set_registry_token(overwrite_from_index_token, fbc_fragment, append=True): - resolved_fbc_fragment = get_resolved_image(fbc_fragment) + with set_registry_token(payload.overwrite_from_index_token, payload.fbc_fragment, append=True): + resolved_fbc_fragment = get_resolved_image(payload.fbc_fragment) prebuild_info = prepare_request_for_build( request_id, RequestConfigFBCOperation( - _binary_image=binary_image, - from_index=from_index, - overwrite_from_index_token=overwrite_from_index_token, - add_arches=add_arches, - fbc_fragment=fbc_fragment, - distribution_scope=distribution_scope, + _binary_image=payload.binary_image, + from_index=payload.from_index, + overwrite_from_index_token=payload.overwrite_from_index_token, + add_arches=payload.add_arches, + fbc_fragment=payload.fbc_fragment, + distribution_scope=payload.distribution_scope, binary_image_config=binary_image_config, ), ) @@ -88,7 +81,7 @@ def handle_fbc_operation_request( from_index_resolved, binary_image_resolved, resolved_fbc_fragment, - overwrite_from_index_token, + payload.overwrite_from_index_token, ) _add_label_to_index( @@ -111,15 +104,15 @@ def handle_fbc_operation_request( _push_image(request_id, arch) set_request_state(request_id, 'in_progress', 'Creating the manifest list') - output_pull_spec = _create_and_push_manifest_list(request_id, arches, build_tags) + output_pull_spec = _create_and_push_manifest_list(request_id, arches, payload.build_tags) _update_index_image_pull_spec( output_pull_spec, request_id, arches, - from_index, - overwrite_from_index, - overwrite_from_index_token, + payload.from_index, + payload.overwrite_from_index, + payload.overwrite_from_index_token, from_index_resolved, add_or_rm=True, ) diff --git a/iib/workers/tasks/build_merge_index_image.py b/iib/workers/tasks/build_merge_index_image.py index 57ce9d27..642c3d1d 100644 --- a/iib/workers/tasks/build_merge_index_image.py +++ b/iib/workers/tasks/build_merge_index_image.py @@ -43,6 +43,7 @@ RequestConfigMerge, ) from iib.workers.tasks.iib_static_types import BundleImage +from iib.common.pydantic_models import MergeIndexImagePydanticModel __all__ = ['handle_merge_request'] @@ -192,18 +193,9 @@ def _add_bundles_missing_in_source( @app.task @request_logger def handle_merge_request( - source_from_index: str, - deprecation_list: List[str], + payload: MergeIndexImagePydanticModel, request_id: int, - binary_image: Optional[str] = None, - target_index: Optional[str] = None, - overwrite_target_index: bool = False, - overwrite_target_index_token: Optional[str] = None, - distribution_scope: Optional[str] = None, binary_image_config: Optional[str] = None, - build_tags: Optional[List[str]] = None, - graph_update_mode: Optional[str] = None, - ignore_bundle_ocp_version: Optional[bool] = False, ) -> None: """ Coordinate the work needed to merge old (N) index image with new (N+1) index image. @@ -232,15 +224,15 @@ def handle_merge_request( :raises IIBError: if the index image merge fails. 
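+    :param MergeIndexImagePydanticModel payload: the validated request payload for the
+        merge-index-image operation.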
""" _cleanup() - with set_registry_token(overwrite_target_index_token, target_index, append=True): + with set_registry_token(payload.overwrite_target_index_token, payload.target_index, append=True): prebuild_info = prepare_request_for_build( request_id, RequestConfigMerge( - _binary_image=binary_image, - overwrite_target_index_token=overwrite_target_index_token, - source_from_index=source_from_index, - target_index=target_index, - distribution_scope=distribution_scope, + _binary_image=payload.binary_image, + overwrite_target_index_token=payload.overwrite_target_index_token, + source_from_index=payload.source_from_index, + target_index=payload.target_index, + distribution_scope=payload.distribution_scope, binary_image_config=binary_image_config, ), ) @@ -250,7 +242,7 @@ def handle_merge_request( dockerfile_name = 'index.Dockerfile' with tempfile.TemporaryDirectory(prefix=f'iib-{request_id}-') as temp_dir: - with set_registry_token(overwrite_target_index_token, target_index, append=True): + with set_registry_token(payload.overwrite_target_index_token, payload.target_index, append=True): source_fbc = is_image_fbc(source_from_index_resolved) target_fbc = is_image_fbc(target_index_resolved) @@ -270,16 +262,16 @@ def handle_merge_request( set_request_state(request_id, 'in_progress', 'Getting bundles present in the index images') log.info('Getting bundles present in the source index image') - with set_registry_token(overwrite_target_index_token, target_index, append=True): + with set_registry_token(payload.overwrite_target_index_token, payload.target_index, append=True): source_index_bundles, source_index_bundles_pull_spec = _get_present_bundles( source_from_index_resolved, temp_dir ) target_index_bundles: List[BundleImage] = [] - if target_index: + if payload.target_index: log.info('Getting bundles present in the target index image') with set_registry_token( - overwrite_target_index_token, target_index_resolved, append=True + payload.overwrite_target_index_token, target_index_resolved, append=True ): target_index_bundles, _ = _get_present_bundles(target_index_resolved, temp_dir) @@ -295,11 +287,11 @@ def handle_merge_request( request_id=request_id, arch=arch, ocp_version=prebuild_info['target_ocp_version'], - graph_update_mode=graph_update_mode, - target_index=target_index, - overwrite_target_index_token=overwrite_target_index_token, + graph_update_mode=payload.graph_update_mode, + target_index=payload.target_index, + overwrite_target_index_token=payload.overwrite_target_index_token, distribution_scope=prebuild_info['distribution_scope'], - ignore_bundle_ocp_version=ignore_bundle_ocp_version, + ignore_bundle_ocp_version=payload.ignore_bundle_ocp_version, ) missing_bundle_paths = [bundle['bundlePath'] for bundle in missing_bundles] @@ -309,7 +301,7 @@ def handle_merge_request( log.info('Deprecating bundles in the deprecation list') intermediate_bundles = missing_bundle_paths + source_index_bundles_pull_spec deprecation_bundles = get_bundles_from_deprecation_list( - intermediate_bundles, deprecation_list + intermediate_bundles, payload.deprecation_list ) # We do not need to pass the invalid_version_bundles through the # get_bundles_from_deprecation_list function because we already know @@ -346,7 +338,7 @@ def handle_merge_request( base_dir=temp_dir, binary_image=prebuild_info['binary_image'], from_index=intermediate_image_name, - overwrite_target_index_token=overwrite_target_index_token, + overwrite_target_index_token=payload.overwrite_target_index_token, ) if target_fbc: @@ -396,15 +388,15 @@ def 
handle_merge_request( ) output_pull_spec = _create_and_push_manifest_list( - request_id, prebuild_info['arches'], build_tags + request_id, prebuild_info['arches'], payload.build_tags ) _update_index_image_pull_spec( output_pull_spec, request_id, prebuild_info['arches'], - target_index, - overwrite_target_index, - overwrite_target_index_token, + payload.target_index, + payload.overwrite_target_index, + payload.overwrite_target_index_token, target_index_resolved, ) _cleanup() diff --git a/iib/workers/tasks/build_recursive_related_bundles.py b/iib/workers/tasks/build_recursive_related_bundles.py index d841832c..7362336b 100644 --- a/iib/workers/tasks/build_recursive_related_bundles.py +++ b/iib/workers/tasks/build_recursive_related_bundles.py @@ -3,7 +3,7 @@ import logging import os import tempfile -from typing import Any, Dict, List, Optional +from typing import List, Optional from operator_manifest.operator import OperatorManifest import ruamel.yaml @@ -29,6 +29,7 @@ get_bundle_metadata, ) from iib.workers.tasks.iib_static_types import UpdateRequestPayload +from iib.common.pydantic_models import RecursiveRelatedBundlesPydanticModel __all__ = ['handle_recursive_related_bundles_request'] @@ -49,10 +50,8 @@ @app.task @request_logger def handle_recursive_related_bundles_request( - parent_bundle_image: str, - organization: str, + payload: RecursiveRelatedBundlesPydanticModel, request_id: int, - registry_auths: Optional[Dict[str, Any]] = None, ) -> None: """ Coordinate the work needed to find recursive related bundles of the operator bundle image. @@ -69,14 +68,14 @@ def handle_recursive_related_bundles_request( set_request_state(request_id, 'in_progress', 'Resolving parent_bundle_image') - with set_registry_auths(registry_auths): - parent_bundle_image_resolved = get_resolved_image(parent_bundle_image) + with set_registry_auths(payload.registry_auths): + parent_bundle_image_resolved = get_resolved_image(payload.parent_bundle_image) payload: UpdateRequestPayload = { 'parent_bundle_image_resolved': parent_bundle_image_resolved, 'state': 'in_progress', 'state_reason': ( - f'Finding recursive related bundles for the bundle: {parent_bundle_image}' + f'Finding recursive related bundles for the bundle: {payload.parent_bundle_image}' ), } update_request(request_id, payload) @@ -91,7 +90,7 @@ def handle_recursive_related_bundles_request( current_level_related_bundles = [] for bundle in temp_current_level_related_bundles: children_related_bundles = process_parent_bundle_image( - bundle, request_id, organization + bundle, request_id, payload.organization ) current_level_related_bundles.extend(children_related_bundles) diff --git a/iib/workers/tasks/build_regenerate_bundle.py b/iib/workers/tasks/build_regenerate_bundle.py index 81cbdfb4..411c0ca6 100644 --- a/iib/workers/tasks/build_regenerate_bundle.py +++ b/iib/workers/tasks/build_regenerate_bundle.py @@ -32,6 +32,7 @@ get_bundle_metadata, ) from iib.workers.tasks.iib_static_types import BundleMetadata, UpdateRequestPayload +from iib.common.pydantic_models import RegenerateBundlePydanticModel __all__ = ['handle_regenerate_bundle_request'] @@ -52,11 +53,8 @@ @app.task @request_logger def handle_regenerate_bundle_request( - from_bundle_image: str, - organization: str, + payload: RegenerateBundlePydanticModel, request_id: int, - registry_auths: Optional[Dict[str, Any]] = None, - bundle_replacements: Optional[Dict[str, str]] = {}, ) -> None: """ Coordinate the work needed to regenerate the operator bundle image. 
@@ -74,8 +72,8 @@ def handle_regenerate_bundle_request( set_request_state(request_id, 'in_progress', 'Resolving from_bundle_image') - with set_registry_auths(registry_auths): - from_bundle_image_resolved = get_resolved_image(from_bundle_image) + with set_registry_auths(payload.registry_auths): + from_bundle_image_resolved = get_resolved_image(payload.from_bundle_image) arches: Set[str] = get_image_arches(from_bundle_image_resolved) if not arches: @@ -111,9 +109,9 @@ def handle_regenerate_bundle_request( manifests_path, metadata_path, request_id, - organization=organization, + organization=payload.organization, pinned_by_iib=pinned_by_iib, - bundle_replacements=bundle_replacements, + bundle_replacements=payload.bundle_replacements, ) with open(os.path.join(temp_dir, 'Dockerfile'), 'w') as dockerfile: From 7123e63e8907c986ef525cd6d223b930f49073fe Mon Sep 17 00:00:00 2001 From: xdaile Date: Tue, 16 Jan 2024 19:44:07 +0100 Subject: [PATCH 3/6] Fix differences to iib without Pydantic --- iib/common/pydantic_models.py | 25 +++++++++++++------------ iib/web/api_v1.py | 1 - 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/iib/common/pydantic_models.py b/iib/common/pydantic_models.py index 7a819bc4..12dae6ae 100644 --- a/iib/common/pydantic_models.py +++ b/iib/common/pydantic_models.py @@ -66,7 +66,7 @@ class AddPydanticModel(PydanticModel): AfterValidator(images_format_check), ] cnr_token: Optional[SecretStr] = None # deprecated - check_related_images: Optional[bool] = False + check_related_images: Optional[bool] = None # old request without this parameter will not have False but None deprecation_list: Annotated[ Optional[List[str]], AfterValidator(get_unique_deprecation_list_items), @@ -123,6 +123,7 @@ def bundles_needed_with_check_related_images(self) -> 'AddPydanticModel': def get_json_for_request(self): """Return json with the parameters we store in the db.""" return self.model_dump( + # include=["deprecation_list"], exclude=[ "add_arches", "build_tags", @@ -131,7 +132,7 @@ def get_json_for_request(self): "overwrite_from_index", "overwrite_from_index_token", ], - exclude_defaults=True, + exclude_none=True, ) @@ -167,7 +168,7 @@ def get_json_for_request(self): """Return json with the parameters we store in the db.""" return self.model_dump( exclude=["add_arches", "build_tags", "overwrite_from_index", "overwrite_from_index_token"], - exclude_defaults=True, + exclude_none=True, ) def _get_all_keys_to_check_in_db(self): @@ -191,7 +192,7 @@ class RegenerateBundlePydanticModel(PydanticModel): """Datastructure of the request to /builds/regenerate-bundle API point.""" # BUNDLE_IMAGE, from_bundle_image_resolved, build_tags? 
- bundle_replacements: Optional[Dict[str, str]] = {} + bundle_replacements: Optional[Dict[str, str]] = None from_bundle_image: Annotated[str, AfterValidator(image_format_check)] organization: Optional[str] = None registry_auths: Optional[RegistryAuths] = None # not in db @@ -200,7 +201,7 @@ def get_json_for_request(self): """Return json with the parameters we store in the db.""" return self.model_dump( exclude=["registry_auths"], - exclude_defaults=True, + exclude_none=True, ) def _get_all_keys_to_check_in_db(self): @@ -255,7 +256,7 @@ def get_json_for_request(self): """Return json with the parameters we store in the db.""" return self.model_dump( exclude=["build_tags", "overwrite_target_index", "overwrite_target_index_token"], - exclude_defaults=True, + exclude_none=True, ) def _get_all_keys_to_check_in_db(self): @@ -275,13 +276,13 @@ class CreateEmptyIndexPydanticModel(PydanticModel): AfterValidator(image_format_check), AfterValidator(length_validator), ] - labels: Optional[Dict[str, str]] = {} - output_fbc: Optional[bool] = False + labels: Optional[Dict[str, str]] = None # old request without this parameter will not have empty labels + output_fbc: Optional[bool] = None # old request without this parameter will not have empty output_fbc def get_json_for_request(self): """Return json with the parameters we store in the db.""" return self.model_dump( - exclude_defaults=True, + exclude_none=True, ) def _get_all_keys_to_check_in_db(self): @@ -301,7 +302,7 @@ def get_json_for_request(self): """Return json with the parameters we store in the db.""" return self.model_dump( exclude=["registry_auths"], - exclude_defaults=True, + exclude_none=True, ) @@ -321,7 +322,7 @@ class FbcOperationsPydanticModel(PydanticModel): AfterValidator(length_validator), AfterValidator(get_unique_bundles), AfterValidator(images_format_check), - ] = [] + ] = None # old request without this parameter will not have empty list but None build_tags: Optional[List[str]] = [] distribution_scope: Annotated[ Optional[DISTRIBUTION_SCOPE_LITERAL], BeforeValidator(distribution_scope_lower), @@ -349,7 +350,7 @@ def get_json_for_request(self): """Return json with the parameters we store in the db.""" return self.model_dump( exclude=["add_arches", "build_tags", "overwrite_from_index", "overwrite_from_index_token"], - exclude_defaults=True, + exclude_none=True, ) def _get_all_keys_to_check_in_db(self): diff --git a/iib/web/api_v1.py b/iib/web/api_v1.py index 42d8b860..642f1935 100644 --- a/iib/web/api_v1.py +++ b/iib/web/api_v1.py @@ -1153,7 +1153,6 @@ def fbc_operations() -> Tuple[flask.Response, int]: request = RequestFbcOperations.from_json_replacement( payload=request_payload, - batch_disabled=True, build_tags_allowed=True, ) From 9675d0a8fd9cd64201286263b52139dbe9ecce31 Mon Sep 17 00:00:00 2001 From: xdaile Date: Thu, 18 Jan 2024 08:54:56 +0100 Subject: [PATCH 4/6] Black Lint fixes --- iib/common/pydantic_models.py | 61 ++++++++++++------- iib/common/pydantic_utils.py | 27 ++++---- iib/web/api_v1.py | 43 ++++++------- iib/web/models.py | 35 +++++------ iib/workers/tasks/build.py | 32 ++++++++-- iib/workers/tasks/build_create_empty_index.py | 2 +- iib/workers/tasks/build_fbc_operations.py | 3 +- iib/workers/tasks/build_merge_index_image.py | 20 ++++-- .../tasks/build_recursive_related_bundles.py | 2 +- iib/workers/tasks/build_regenerate_bundle.py | 2 +- 10 files changed, 137 insertions(+), 90 deletions(-) diff --git a/iib/common/pydantic_models.py b/iib/common/pydantic_models.py index 12dae6ae..f50f4784 100644 --- 
a/iib/common/pydantic_models.py +++ b/iib/common/pydantic_models.py @@ -37,16 +37,14 @@ class PydanticModel(BaseModel): - @classmethod def _get_all_keys_to_check_in_db(cls): + """Class that returns request specific keys to check.""" raise NotImplementedError("Not implemented") def get_keys_to_check_in_db(self): """Filter keys, which need to be checked in db. Return only a keys that are set to values.""" - return [ - k for k in self._get_all_keys_to_check_in_db() if getattr(self, k, None) - ] + return [k for k in self._get_all_keys_to_check_in_db() if getattr(self, k, None)] class AddPydanticModel(PydanticModel): @@ -66,14 +64,16 @@ class AddPydanticModel(PydanticModel): AfterValidator(images_format_check), ] cnr_token: Optional[SecretStr] = None # deprecated - check_related_images: Optional[bool] = None # old request without this parameter will not have False but None + # TODO remove this comment -> old request without this parameter will not have False but None + check_related_images: Optional[bool] = None deprecation_list: Annotated[ Optional[List[str]], AfterValidator(get_unique_deprecation_list_items), AfterValidator(images_format_check), ] = [] # deprecated distribution_scope: Annotated[ - Optional[DISTRIBUTION_SCOPE_LITERAL], BeforeValidator(distribution_scope_lower), + Optional[DISTRIBUTION_SCOPE_LITERAL], + BeforeValidator(distribution_scope_lower), ] = None force_backport: Optional[bool] = False # deprecated from_index: Annotated[str, AfterValidator(image_format_check)] @@ -102,7 +102,7 @@ def verify_graph_update_mode_with_index_image(self) -> 'AddPydanticModel': @model_validator(mode='after') def from_index_needed_if_no_bundles(self) -> 'AddPydanticModel': """ - Check if no bundles and `from_index is specified + Check if no bundles and `from_index is specified. 
if no bundles and no from index then an empty index will be created which is a no-op """ @@ -113,7 +113,7 @@ def from_index_needed_if_no_bundles(self) -> 'AddPydanticModel': # TODO remove this comment -> Validator from RequestADD class @model_validator(mode='after') def bundles_needed_with_check_related_images(self) -> 'AddPydanticModel': - """Verify that `check_related_images` is specified when bundles are specified""" + """Verify that `check_related_images` is specified when bundles are specified.""" if self.check_related_images and not self.bundles: raise ValidationError( '"check_related_images" must be specified only when bundles are specified' @@ -123,7 +123,6 @@ def bundles_needed_with_check_related_images(self) -> 'AddPydanticModel': def get_json_for_request(self): """Return json with the parameters we store in the db.""" return self.model_dump( - # include=["deprecation_list"], exclude=[ "add_arches", "build_tags", @@ -135,7 +134,6 @@ def get_json_for_request(self): exclude_none=True, ) - def _get_all_keys_to_check_in_db(self): return ["binary_image", "bundles", "deprecation_list", "from_index"] @@ -150,7 +148,8 @@ class RmPydanticModel(PydanticModel): ] = None build_tags: Optional[List[str]] = [] distribution_scope: Annotated[ - Optional[DISTRIBUTION_SCOPE_LITERAL], BeforeValidator(distribution_scope_lower), + Optional[DISTRIBUTION_SCOPE_LITERAL], + BeforeValidator(distribution_scope_lower), ] = None from_index: Annotated[str, AfterValidator(image_format_check)] operators: Annotated[List[str], AfterValidator(length_validator)] @@ -167,7 +166,12 @@ def verify_overwrite_from_index_token(self) -> 'RmPydanticModel': def get_json_for_request(self): """Return json with the parameters we store in the db.""" return self.model_dump( - exclude=["add_arches", "build_tags", "overwrite_from_index", "overwrite_from_index_token"], + exclude=[ + "add_arches", + "build_tags", + "overwrite_from_index", + "overwrite_from_index_token", + ], exclude_none=True, ) @@ -228,10 +232,11 @@ class MergeIndexImagePydanticModel(PydanticModel): AfterValidator(images_format_check), ] = [] distribution_scope: Annotated[ - Optional[DISTRIBUTION_SCOPE_LITERAL], BeforeValidator(distribution_scope_lower), + Optional[DISTRIBUTION_SCOPE_LITERAL], + BeforeValidator(distribution_scope_lower), ] = None graph_update_mode: Optional[GRAPH_MODE_LITERAL] = None - overwrite_target_index: Optional[bool] = False # Why do we need this bool? Isn't the token enough? 
+ overwrite_target_index: Optional[bool] = False overwrite_target_index_token: Optional[SecretStr] = None source_from_index: Annotated[str, AfterValidator(image_format_check)] target_index: Annotated[Optional[str], AfterValidator(image_format_check)] = None @@ -260,7 +265,13 @@ def get_json_for_request(self): ) def _get_all_keys_to_check_in_db(self): - return ["binary_image", "deprecation_list", "source_from_index", "target_index", "target_index"] + return [ + "binary_image", + "deprecation_list", + "source_from_index", + "target_index", + "target_index", + ] class CreateEmptyIndexPydanticModel(PydanticModel): @@ -276,8 +287,10 @@ class CreateEmptyIndexPydanticModel(PydanticModel): AfterValidator(image_format_check), AfterValidator(length_validator), ] - labels: Optional[Dict[str, str]] = None # old request without this parameter will not have empty labels - output_fbc: Optional[bool] = None # old request without this parameter will not have empty output_fbc + # TODO (remove comment) old request without this parameter will not have empty labels + labels: Optional[Dict[str, str]] = None + # TODO (remove comment) old request without this parameter will not have empty output_fbc + output_fbc: Optional[bool] = None def get_json_for_request(self): """Return json with the parameters we store in the db.""" @@ -305,7 +318,6 @@ def get_json_for_request(self): exclude_none=True, ) - def _get_all_keys_to_check_in_db(self): return ["parent_bundle_image"] @@ -317,15 +329,17 @@ class FbcOperationsPydanticModel(PydanticModel): AfterValidator(image_format_check), AfterValidator(binary_image_check), ] = None + # TODO (remove comment) old request without this parameter will not have empty list but None bundles: Annotated[ Optional[List[str]], AfterValidator(length_validator), AfterValidator(get_unique_bundles), AfterValidator(images_format_check), - ] = None # old request without this parameter will not have empty list but None + ] = None build_tags: Optional[List[str]] = [] distribution_scope: Annotated[ - Optional[DISTRIBUTION_SCOPE_LITERAL], BeforeValidator(distribution_scope_lower), + Optional[DISTRIBUTION_SCOPE_LITERAL], + BeforeValidator(distribution_scope_lower), ] = None fbc_fragment: Annotated[ str, @@ -349,7 +363,12 @@ def verify_overwrite_from_index_token(self) -> 'FbcOperationsPydanticModel': def get_json_for_request(self): """Return json with the parameters we store in the db.""" return self.model_dump( - exclude=["add_arches", "build_tags", "overwrite_from_index", "overwrite_from_index_token"], + exclude=[ + "add_arches", + "build_tags", + "overwrite_from_index", + "overwrite_from_index_token", + ], exclude_none=True, ) diff --git a/iib/common/pydantic_utils.py b/iib/common/pydantic_utils.py index 968bbef5..83951b87 100644 --- a/iib/common/pydantic_utils.py +++ b/iib/common/pydantic_utils.py @@ -15,9 +15,7 @@ # TODO add regex in future to not allow following values ":s", "s:", ":"? def image_format_check(image_name: str) -> str: if '@' not in image_name and ':' not in image_name: - raise ValidationError( - f'Image {image_name} should have a tag or a digest specified.' 
- ) + raise ValidationError(f'Image {image_name} should have a tag or a digest specified.') return image_name @@ -48,7 +46,10 @@ def get_unique_deprecation_list_items(deprecation_list: Optional[List[str]]) -> return list(set(deprecation_list)) -def validate_graph_mode_index_image(graph_update_mode: str, index_image: str) -> 'MergeIndexImageRequestPayload': +def validate_graph_mode_index_image( + graph_update_mode: str, + index_image: str, +) -> 'MergeIndexImageRequestPayload': """ Validate graph mode and check if index image is allowed to use different graph mode. @@ -57,9 +58,9 @@ def validate_graph_mode_index_image(graph_update_mode: str, index_image: str) -> :raises: ValidationError when incorrect graph_update_mode is set :raises: Forbidden when graph_mode can't be used for given index image """ - if graph_update_mode: - allowed_from_indexes: List[str] = ["REMOVE_#:r"] # current_app.config['IIB_GRAPH_MODE_INDEX_ALLOW_LIST'] + # TODO remove this comment, replace value with current_app.config['IIB_GRAPH_MODE_INDEX_ALLOW_LIST'] + allowed_from_indexes: List[str] = ["REMOVE_#:r"] if index_image not in allowed_from_indexes: raise Forbidden( '"graph_update_mode" can only be used on the' @@ -70,9 +71,7 @@ def validate_graph_mode_index_image(graph_update_mode: str, index_image: str) -> # RequestIndexImageMixin def from_index_add_arches(model: 'AddRequestPydanticModel') -> 'AddRequestPydanticModel': - """ - Check if both `from_index` and `add_arches` are not specified - """ + """Check if both `from_index` and `add_arches` are not specified.""" if not model.from_index and not model.add_arches: raise ValidationError('One of "from_index" or "add_arches" must be specified') return model @@ -81,7 +80,7 @@ def from_index_add_arches(model: 'AddRequestPydanticModel') -> 'AddRequestPydant # RequestIndexImageMixin def binary_image_check(binary_image: str) -> str: """ - # Validate binary_image is correctly provided + # Validate binary_image is correctly provided. """ if not binary_image and not current_app.config['IIB_BINARY_IMAGE_CONFIG']: raise ValidationError('The "binary_image" value must be a non-empty string') @@ -95,9 +94,7 @@ def validate_overwrite_params( disable_auth_check: Optional[bool] = False, ) -> None: """ - Check if both `overwrite_index_image` and `overwrite_index_image_token` are specified - - + Check if both `overwrite_index_image` and `overwrite_index_image_token` are specified. """ if overwrite_index_image_token and not overwrite_index_image: raise ValidationError( @@ -122,5 +119,7 @@ def distribution_scope_lower(distribution_scope: str) -> str: def length_validator(model_property: Any) -> Any: if len(model_property) == 0: - raise ValidationError(f"The {type(model_property)} {model_property} should have at least 1 item.") + raise ValidationError( + f"The {type(model_property)} {model_property} should have at least 1 item." 
+ ) return model_property diff --git a/iib/web/api_v1.py b/iib/web/api_v1.py index 642f1935..917c3c9b 100644 --- a/iib/web/api_v1.py +++ b/iib/web/api_v1.py @@ -452,10 +452,10 @@ def add_bundles() -> Tuple[flask.Response, int]: :rtype: flask.Response :raise ValidationError: if required parameters are not supplied """ - try: request_payload = AddPydanticModel.model_validate( - flask.request.get_json(), strict=True, + flask.request.get_json(), + strict=True, ) except ValidationError as e: # If the JSON data doesn't match the Pydantic model, return a 400 Bad Request response @@ -463,7 +463,6 @@ def add_bundles() -> Tuple[flask.Response, int]: request = RequestAdd.from_json_replacement( payload=request_payload, - build_tags_allowed=True, ) db.session.add(request) db.session.commit() @@ -690,7 +689,8 @@ def rm_operators() -> Tuple[flask.Response, int]: """ try: request_payload = RmPydanticModel.model_validate( - flask.request.get_json(), strict=True, + flask.request.get_json(), + strict=True, ) except ValidationError as e: # If the JSON data doesn't match the Pydantic model, return a 400 Bad Request response @@ -698,7 +698,6 @@ def rm_operators() -> Tuple[flask.Response, int]: request = RequestRm.from_json_replacement( payload=request_payload, - build_tags_allowed=True, ) db.session.add(request) db.session.commit() @@ -738,7 +737,8 @@ def regenerate_bundle() -> Tuple[flask.Response, int]: """ try: request_payload = RegenerateBundlePydanticModel.model_validate( - flask.request.get_json(), strict=True, + flask.request.get_json(), + strict=True, ) except ValidationError as e: # If the JSON data doesn't match the Pydantic model, return a 400 Bad Request response @@ -746,7 +746,6 @@ def regenerate_bundle() -> Tuple[flask.Response, int]: request = RequestRegenerateBundle.from_json_replacement( payload=request_payload, - build_tags_allowed=True, ) db.session.add(request) @@ -781,10 +780,10 @@ def regenerate_bundle_batch() -> Tuple[flask.Response, int]: :rtype: flask.Response :raise ValidationError: if required parameters are not supplied """ - try: request_payload_batch = RegenerateBundleBatchPydanticModel.model_validate( - flask.request.get_json(), strict=True, + flask.request.get_json(), + strict=True, ) except ValidationError as e: # If the JSON data doesn't match the Pydantic model, return a 400 Bad Request response @@ -800,7 +799,6 @@ def regenerate_bundle_batch() -> Tuple[flask.Response, int]: request = RequestRegenerateBundle.from_json_replacement( payload=request_payload, batch=batch, - build_tags_allowed=True, ) db.session.add(request) requests.append(request) @@ -853,7 +851,8 @@ def add_rm_batch() -> Tuple[flask.Response, int]: """ try: request_payload_batch = AddRmBatchPydanticModel.model_validate( - flask.request.get_json(), strict=True, + flask.request.get_json(), + strict=True, ) except ValidationError as e: # If the JSON data doesn't match the Pydantic model, return a 400 Bad Request response @@ -870,13 +869,11 @@ def add_rm_batch() -> Tuple[flask.Response, int]: request = RequestAdd.from_json_replacement( payload=request_payload, batch=batch, - build_tags_allowed=True, ) else: request = RequestRm.from_json_replacement( payload=request_payload, batch=batch, - build_tags_allowed=True, ) db.session.add(request) @@ -946,7 +943,8 @@ def merge_index_image() -> Tuple[flask.Response, int]: """ try: request_payload = MergeIndexImagePydanticModel.model_validate( - flask.request.get_json(), strict=True, + flask.request.get_json(), + strict=True, ) except ValidationError as e: # If the JSON data 
doesn't match the Pydantic model, return a 400 Bad Request response @@ -954,7 +952,6 @@ def merge_index_image() -> Tuple[flask.Response, int]: request = RequestMergeIndexImage.from_json_replacement( payload=request_payload, - build_tags_allowed=True, ) db.session.add(request) @@ -970,9 +967,7 @@ def merge_index_image() -> Tuple[flask.Response, int]: error_callback = failed_request_callback.s(request.id) try: - handle_merge_request.apply_async( - args=args, link_error=error_callback, queue=celery_queue - ) + handle_merge_request.apply_async(args=args, link_error=error_callback, queue=celery_queue) except kombu.exceptions.OperationalError: handle_broker_error(request) @@ -993,7 +988,8 @@ def create_empty_index() -> Tuple[flask.Response, int]: """ try: request_payload = CreateEmptyIndexPydanticModel.model_validate( - flask.request.get_json(), strict=True, + flask.request.get_json(), + strict=True, ) except ValidationError as e: # If the JSON data doesn't match the Pydantic model, return a 400 Bad Request response @@ -1001,7 +997,6 @@ def create_empty_index() -> Tuple[flask.Response, int]: request = RequestCreateEmptyIndex.from_json_replacement( payload=request_payload, - build_tags_allowed=True, ) db.session.add(request) @@ -1039,7 +1034,8 @@ def recursive_related_bundles() -> Tuple[flask.Response, int]: """ try: request_payload = RecursiveRelatedBundlesPydanticModel.model_validate( - flask.request.get_json(), strict=True, + flask.request.get_json(), + strict=True, ) except ValidationError as e: # If the JSON data doesn't match the Pydantic model, return a 400 Bad Request response @@ -1047,7 +1043,6 @@ def recursive_related_bundles() -> Tuple[flask.Response, int]: request = RequestRecursiveRelatedBundles.from_json_replacement( payload=request_payload, - build_tags_allowed=True, ) db.session.add(request) @@ -1145,7 +1140,8 @@ def fbc_operations() -> Tuple[flask.Response, int]: """ try: request_payload = FbcOperationsPydanticModel.model_validate( - flask.request.get_json(), strict=True, + flask.request.get_json(), + strict=True, ) except ValidationError as e: # If the JSON data doesn't match the Pydantic model, return a 400 Bad Request response @@ -1153,7 +1149,6 @@ def fbc_operations() -> Tuple[flask.Response, int]: request = RequestFbcOperations.from_json_replacement( payload=request_payload, - build_tags_allowed=True, ) db.session.add(request) diff --git a/iib/web/models.py b/iib/web/models.py index 4f97223d..02918cd8 100644 --- a/iib/web/models.py +++ b/iib/web/models.py @@ -1,11 +1,9 @@ # SPDX-License-Identifier: GPL-3.0-or-later from __future__ import annotations -from copy import deepcopy from datetime import datetime, timedelta from enum import Enum import json -from typing import Any, cast, Dict, List, Literal, Optional, Sequence, Set, Union -from abc import abstractmethod +from typing import Any, cast, Dict, List, Optional, Set, Union from flask import current_app, url_for from flask_login import UserMixin, current_user @@ -14,7 +12,6 @@ from sqlalchemy.ext.declarative import declared_attr from sqlalchemy.orm import joinedload, load_only, Mapped, validates from sqlalchemy.orm.strategy_options import _AbstractLoad -from werkzeug.exceptions import Forbidden from iib.exceptions import ValidationError from iib.web import db @@ -404,7 +401,6 @@ class Request(db.Model): 'polymorphic_on': 'type', } - @validates('type') def validate_type(self, key: Optional[str], type_num: int) -> int: """ @@ -490,7 +486,6 @@ def from_json_replacement( cls, payload: UnionPydanticRequestType, batch: 
Optional[Batch] = None, - build_tags_allowed: Optional[bool] = False, ): """ Handle JSON requests for the builds/* API endpoint. @@ -498,7 +493,6 @@ def from_json_replacement( :param UnionPydanticRequestType payload: the Pydantic model representing the request. :param Batch batch: the batch to specify with the request. """ - keys_to_check = payload.get_keys_to_check_in_db() for key in keys_to_check: if key in [ @@ -510,15 +504,25 @@ def from_json_replacement( 'target_index', 'parent_bundle_image', ]: - payload.__setattr__(key, Image.get_or_create(pull_specification=payload.__getattribute__(key))) + payload.__setattr__( + key, + Image.get_or_create(pull_specification=payload.__getattribute__(key)), + ) elif key in ["bundles", "deprecation_list"]: - payload.__setattr__(key, [ - Image.get_or_create(pull_specification=image) for image in payload.__getattribute__(key) - ]) + payload.__setattr__( + key, + [ + Image.get_or_create(pull_specification=image) + for image in payload.__getattribute__(key) + ], + ) elif key == "operators": - payload.__setattr__(key, [Operator.get_or_create(name=item) for item in payload.__getattribute__(key)]) + payload.__setattr__( + key, + [Operator.get_or_create(name=item) for item in payload.__getattribute__(key)], + ) else: raise ValidationError(f"Unexpected key: {key} during from_json() method.") @@ -537,14 +541,12 @@ def from_json_replacement( request = cls(**request_kwargs) - if build_tags_allowed: - for bt in payload.build_tags: - request.add_build_tag(bt) + for bt in payload.build_tags: + request.add_build_tag(bt) request.add_state('in_progress', 'The request was initiated') return request - # return value is BaseClassRequestResponse, however because of LSP, we need other types here too def to_json( self, @@ -906,7 +908,6 @@ def distribution_scope(cls: DefaultMeta) -> Mapped[str]: """Return the distribution_scope for the request.""" return db.mapped_column(db.String, nullable=True) - def get_common_index_image_json(self) -> CommonIndexImageResponseBase: """ Return the common set of attributes for an index image request. 
diff --git a/iib/workers/tasks/build.py b/iib/workers/tasks/build.py index 32a3f1b1..4531614c 100644 --- a/iib/workers/tasks/build.py +++ b/iib/workers/tasks/build.py @@ -17,6 +17,7 @@ wait_incrementing, ) +from iib.common.pydantic_models import AddPydanticModel, RmPydanticModel from iib.exceptions import IIBError, ExternalServiceError from iib.workers.api_utils import set_request_state, update_request from iib.workers.config import get_worker_config @@ -58,7 +59,6 @@ GreenwaveConfig, UpdateRequestPayload, ) -from iib.common.pydantic_models import AddPydanticModel, RmPydanticModel __all__ = ['handle_add_request', 'handle_rm_request'] @@ -880,7 +880,11 @@ def handle_add_request( # Resolve bundles to their digests set_request_state(request_id, 'in_progress', 'Resolving the bundles') - with set_registry_token(payload.overwrite_from_index_token, payload.from_index, append=True): + with set_registry_token( + payload.overwrite_from_index_token, + payload.from_index, + append=True, + ): resolved_bundles = get_resolved_bundles(payload.bundles) verify_labels(resolved_bundles) if payload.check_related_images: @@ -926,7 +930,11 @@ def handle_add_request( log.info(msg) set_request_state(request_id, 'in_progress', msg) - with set_registry_token(payload.overwrite_from_index_token, from_index_resolved, append=True): + with set_registry_token( + payload.overwrite_from_index_token, + from_index_resolved, + append=True, + ): present_bundles, present_bundles_pull_spec = _get_present_bundles( from_index_resolved, temp_dir ) @@ -1033,7 +1041,11 @@ def handle_add_request( ) # get catalog with opted-in operators os.makedirs(os.path.join(temp_dir, 'from_index'), exist_ok=True) - with set_registry_token(payload.overwrite_from_index_token, from_index_resolved, append=True): + with set_registry_token( + payload.overwrite_from_index_token, + from_index_resolved, + append=True, + ): catalog_from_index = get_catalog_dir( from_index=from_index_resolved, base_dir=os.path.join(temp_dir, 'from_index') ) @@ -1152,7 +1164,11 @@ def handle_rm_request( from_index_resolved = prebuild_info['from_index_resolved'] with tempfile.TemporaryDirectory(prefix=f'iib-{request_id}-') as temp_dir: - with set_registry_token(payload.overwrite_from_index_token, from_index_resolved, append=True): + with set_registry_token( + payload.overwrite_from_index_token, + from_index_resolved, + append=True, + ): image_is_fbc = is_image_fbc(from_index_resolved) if image_is_fbc: @@ -1173,7 +1189,11 @@ def handle_rm_request( os.makedirs(os.path.join(temp_dir, 'from_index'), exist_ok=True) # get catalog with opted-in operators - with set_registry_token(payload.overwrite_from_index_token, from_index_resolved, append=True): + with set_registry_token( + payload.overwrite_from_index_token, + from_index_resolved, + append=True, + ): catalog_from_index = get_catalog_dir( from_index=from_index_resolved, base_dir=os.path.join(temp_dir, 'from_index') ) diff --git a/iib/workers/tasks/build_create_empty_index.py b/iib/workers/tasks/build_create_empty_index.py index 85e3eb6c..92429841 100644 --- a/iib/workers/tasks/build_create_empty_index.py +++ b/iib/workers/tasks/build_create_empty_index.py @@ -5,6 +5,7 @@ import re from typing import Dict, List, Optional +from iib.common.pydantic_models import CreateEmptyIndexPydanticModel from iib.exceptions import IIBError from iib.workers.api_utils import set_request_state from iib.workers.tasks.build import ( @@ -27,7 +28,6 @@ grpcurl_get_db_data, ) from iib.workers.tasks.iib_static_types import PrebuildInfo -from 
iib.common.pydantic_models import CreateEmptyIndexPydanticModel __all__ = ['handle_create_empty_index_request'] diff --git a/iib/workers/tasks/build_fbc_operations.py b/iib/workers/tasks/build_fbc_operations.py index c2323559..84d2e978 100644 --- a/iib/workers/tasks/build_fbc_operations.py +++ b/iib/workers/tasks/build_fbc_operations.py @@ -3,6 +3,7 @@ import tempfile from typing import Dict, Optional +from iib.common.pydantic_models import FbcOperationsPydanticModel from iib.workers.api_utils import set_request_state from iib.workers.tasks.build import ( _add_label_to_index, @@ -22,7 +23,7 @@ set_registry_token, RequestConfigFBCOperation, ) -from iib.common.pydantic_models import FbcOperationsPydanticModel + __all__ = ['handle_fbc_operation_request'] log = logging.getLogger(__name__) diff --git a/iib/workers/tasks/build_merge_index_image.py b/iib/workers/tasks/build_merge_index_image.py index 642c3d1d..dc8e84f9 100644 --- a/iib/workers/tasks/build_merge_index_image.py +++ b/iib/workers/tasks/build_merge_index_image.py @@ -15,6 +15,7 @@ ) from packaging.version import Version +from iib.common.pydantic_models import MergeIndexImagePydanticModel from iib.exceptions import IIBError from iib.workers.api_utils import set_request_state from iib.workers.tasks.build import ( @@ -43,7 +44,6 @@ RequestConfigMerge, ) from iib.workers.tasks.iib_static_types import BundleImage -from iib.common.pydantic_models import MergeIndexImagePydanticModel __all__ = ['handle_merge_request'] @@ -224,7 +224,11 @@ def handle_merge_request( :raises IIBError: if the index image merge fails. """ _cleanup() - with set_registry_token(payload.overwrite_target_index_token, payload.target_index, append=True): + with set_registry_token( + payload.overwrite_target_index_token, + payload.target_index, + append=True, + ): prebuild_info = prepare_request_for_build( request_id, RequestConfigMerge( @@ -242,7 +246,11 @@ def handle_merge_request( dockerfile_name = 'index.Dockerfile' with tempfile.TemporaryDirectory(prefix=f'iib-{request_id}-') as temp_dir: - with set_registry_token(payload.overwrite_target_index_token, payload.target_index, append=True): + with set_registry_token( + payload.overwrite_target_index_token, + payload.target_index, + append=True, + ): source_fbc = is_image_fbc(source_from_index_resolved) target_fbc = is_image_fbc(target_index_resolved) @@ -262,7 +270,11 @@ def handle_merge_request( set_request_state(request_id, 'in_progress', 'Getting bundles present in the index images') log.info('Getting bundles present in the source index image') - with set_registry_token(payload.overwrite_target_index_token, payload.target_index, append=True): + with set_registry_token( + payload.overwrite_target_index_token, + payload.target_index, + append=True, + ): source_index_bundles, source_index_bundles_pull_spec = _get_present_bundles( source_from_index_resolved, temp_dir ) diff --git a/iib/workers/tasks/build_recursive_related_bundles.py b/iib/workers/tasks/build_recursive_related_bundles.py index 7362336b..4c83bb23 100644 --- a/iib/workers/tasks/build_recursive_related_bundles.py +++ b/iib/workers/tasks/build_recursive_related_bundles.py @@ -8,6 +8,7 @@ from operator_manifest.operator import OperatorManifest import ruamel.yaml +from iib.common.pydantic_models import RecursiveRelatedBundlesPydanticModel from iib.exceptions import IIBError from iib.workers.api_utils import set_request_state, update_request from iib.workers.tasks.build import ( @@ -29,7 +30,6 @@ get_bundle_metadata, ) from iib.workers.tasks.iib_static_types 
import UpdateRequestPayload -from iib.common.pydantic_models import RecursiveRelatedBundlesPydanticModel __all__ = ['handle_recursive_related_bundles_request'] diff --git a/iib/workers/tasks/build_regenerate_bundle.py b/iib/workers/tasks/build_regenerate_bundle.py index 411c0ca6..3feaa496 100644 --- a/iib/workers/tasks/build_regenerate_bundle.py +++ b/iib/workers/tasks/build_regenerate_bundle.py @@ -9,6 +9,7 @@ from operator_manifest.operator import ImageName, OperatorManifest, OperatorCSV import ruamel.yaml +from iib.common.pydantic_models import RegenerateBundlePydanticModel from iib.exceptions import IIBError from iib.workers.s3_utils import upload_file_to_s3_bucket from iib.workers.api_utils import set_request_state, update_request @@ -32,7 +33,6 @@ get_bundle_metadata, ) from iib.workers.tasks.iib_static_types import BundleMetadata, UpdateRequestPayload -from iib.common.pydantic_models import RegenerateBundlePydanticModel __all__ = ['handle_regenerate_bundle_request'] From 2cc8c5f2e0f716d656f9468a0cc373115cb13ae8 Mon Sep 17 00:00:00 2001 From: xdaile Date: Thu, 1 Feb 2024 17:06:42 +0100 Subject: [PATCH 5/6] Minor fixes --- iib/common/pydantic_models.py | 74 ++++++++++++++----- iib/common/pydantic_utils.py | 30 ++++---- iib/web/iib_static_types.py | 2 +- iib/workers/tasks/build.py | 28 ++++--- .../tasks/build_recursive_related_bundles.py | 20 +++-- iib/workers/tasks/build_regenerate_bundle.py | 12 ++- 6 files changed, 110 insertions(+), 56 deletions(-) diff --git a/iib/common/pydantic_models.py b/iib/common/pydantic_models.py index f50f4784..fbe525a6 100644 --- a/iib/common/pydantic_models.py +++ b/iib/common/pydantic_models.py @@ -36,18 +36,24 @@ ] -class PydanticModel(BaseModel): +class PydanticRequestBaseModel(BaseModel): + """Base model representing IIB request.""" + @classmethod def _get_all_keys_to_check_in_db(cls): """Class that returns request specific keys to check.""" raise NotImplementedError("Not implemented") def get_keys_to_check_in_db(self): - """Filter keys, which need to be checked in db. Return only a keys that are set to values.""" + """ + Filter keys, which need to be checked in db. + + Return only a keys that are set to values. 
+ """ return [k for k in self._get_all_keys_to_check_in_db() if getattr(self, k, None)] -class AddPydanticModel(PydanticModel): +class AddPydanticModel(PydanticRequestBaseModel): """Datastructure of the request to /builds/add API point.""" add_arches: Optional[List[str]] = None @@ -76,18 +82,22 @@ class AddPydanticModel(PydanticModel): BeforeValidator(distribution_scope_lower), ] = None force_backport: Optional[bool] = False # deprecated - from_index: Annotated[str, AfterValidator(image_format_check)] + from_index: Annotated[Optional[str], AfterValidator(image_format_check)] = None graph_update_mode: Optional[GRAPH_MODE_LITERAL] = None organization: Optional[str] = None # deprecated overwrite_from_index: Optional[bool] = False overwrite_from_index_token: Optional[SecretStr] = None - _from_index_add_arches_check = model_validator(mode='after')(from_index_add_arches) + @model_validator(mode='after') + def verify_from_index_add_arches_combination(self) -> 'AddPydanticModel': + """Check the 'overwrite_from_index' parameter with 'overwrite_from_index_token' param.""" + from_index_add_arches(self.from_index, self.add_arches) + return self # TODO remove this comment -> Validator from RequestIndexImageMixin class @model_validator(mode='after') def verify_overwrite_from_index_token(self) -> 'AddPydanticModel': - """Check the 'overwrite_from_index' parameter in combination with 'overwrite_from_index_token' parameter.""" + """Check the 'overwrite_from_index' parameter with 'overwrite_from_index_token' param.""" validate_overwrite_params(self.overwrite_from_index, self.overwrite_from_index_token) return self @@ -138,7 +148,7 @@ def _get_all_keys_to_check_in_db(self): return ["binary_image", "bundles", "deprecation_list", "from_index"] -class RmPydanticModel(PydanticModel): +class RmPydanticModel(PydanticRequestBaseModel): """Datastructure of the request to /builds/rm API point.""" add_arches: Optional[List[str]] = None @@ -151,16 +161,24 @@ class RmPydanticModel(PydanticModel): Optional[DISTRIBUTION_SCOPE_LITERAL], BeforeValidator(distribution_scope_lower), ] = None - from_index: Annotated[str, AfterValidator(image_format_check)] + from_index: Annotated[Optional[str], AfterValidator(image_format_check)] = None operators: Annotated[List[str], AfterValidator(length_validator)] overwrite_from_index: Optional[bool] = False overwrite_from_index_token: Optional[SecretStr] = None - _from_index_add_arches_check = model_validator(mode='after')(from_index_add_arches) + @model_validator(mode='after') + def verify_from_index_add_arches_combination(self) -> 'AddPydanticModel': + """Check the 'overwrite_from_index' parameter with 'overwrite_from_index_token' param.""" + from_index_add_arches(self.from_index, self.add_arches) + return self @model_validator(mode='after') def verify_overwrite_from_index_token(self) -> 'RmPydanticModel': - validate_overwrite_params(self.overwrite_from_index, self.overwrite_from_index_token) + """Validate overwrite_from_index and overwrite_from_index_token param combination.""" + validate_overwrite_params( + self.overwrite_from_index, + self.overwrite_from_index_token, + ) return self def get_json_for_request(self): @@ -180,19 +198,30 @@ def _get_all_keys_to_check_in_db(self): class AddRmBatchPydanticModel(BaseModel): + """Datastructure of the request to /builds/add-rm-batch API point.""" + annotations: Dict[str, Any] build_requests: List[Union[AddPydanticModel, RmPydanticModel]] class RegistryAuth(BaseModel): + """Datastructure representing private registry token.""" + auth: SecretStr 
-class RegistryAuths(BaseModel): # is {"auths":{}} allowed? +class RegistryAuths(BaseModel): + """ + Datastructure used within recursive-related-bundles. + + Provide the dockerconfig.json for authentication to private registries. + Non-auth information in the dockerconfig.json is not allowed. + """ + auths: Annotated[Dict[SecretStr, RegistryAuth], AfterValidator(length_validator)] -class RegenerateBundlePydanticModel(PydanticModel): +class RegenerateBundlePydanticModel(PydanticRequestBaseModel): """Datastructure of the request to /builds/regenerate-bundle API point.""" # BUNDLE_IMAGE, from_bundle_image_resolved, build_tags? @@ -213,12 +242,14 @@ def _get_all_keys_to_check_in_db(self): class RegenerateBundleBatchPydanticModel(BaseModel): + """Datastructure of the request to /builds/regenerate-bundle-batch API point.""" + build_requests: List[RegenerateBundlePydanticModel] annotations: Dict[str, Any] -class MergeIndexImagePydanticModel(PydanticModel): - """Datastructure of the request to /builds/regenerate-bundle API point.""" +class MergeIndexImagePydanticModel(PydanticRequestBaseModel): + """Datastructure of the request to /builds/merge-index-image API point.""" binary_image: Annotated[ Optional[str], @@ -245,11 +276,13 @@ class MergeIndexImagePydanticModel(PydanticModel): @model_validator(mode='after') def verify_graph_update_mode_with_target_index(self) -> 'MergeIndexImagePydanticModel': + """Validate graph_update_mode with target_index param combination.""" validate_graph_mode_index_image(self.graph_update_mode, self.target_index) return self @model_validator(mode='after') def verify_overwrite_from_index_token(self) -> 'MergeIndexImagePydanticModel': + """Validate overwrite_target_index with overwrite_target_index_token param combination.""" validate_overwrite_params( self.overwrite_target_index, self.overwrite_target_index_token, @@ -274,8 +307,8 @@ def _get_all_keys_to_check_in_db(self): ] -class CreateEmptyIndexPydanticModel(PydanticModel): - """Datastructure of the request to /builds/regenerate-bundle API point.""" +class CreateEmptyIndexPydanticModel(PydanticRequestBaseModel): + """Datastructure of the request to /builds/create-empty-index API point.""" binary_image: Annotated[ Optional[str], @@ -302,7 +335,9 @@ def _get_all_keys_to_check_in_db(self): return ["binary_image", "from_index"] -class RecursiveRelatedBundlesPydanticModel(PydanticModel): +class RecursiveRelatedBundlesPydanticModel(PydanticRequestBaseModel): + """Datastructure of the request to /builds/recursive-related-bundles API point.""" + organization: Optional[str] = None parent_bundle_image: Annotated[ str, @@ -322,7 +357,9 @@ def _get_all_keys_to_check_in_db(self): return ["parent_bundle_image"] -class FbcOperationsPydanticModel(PydanticModel): +class FbcOperationsPydanticModel(PydanticRequestBaseModel): + """Datastructure of the request to /builds/fbc-operations API point.""" + add_arches: Optional[List[str]] = [] binary_image: Annotated[ Optional[str], @@ -357,6 +394,7 @@ class FbcOperationsPydanticModel(PydanticModel): @model_validator(mode='after') def verify_overwrite_from_index_token(self) -> 'FbcOperationsPydanticModel': + """Validate overwrite_from_index and overwrite_from_index_token param combination.""" validate_overwrite_params(self.overwrite_from_index, self.overwrite_from_index_token) return self diff --git a/iib/common/pydantic_utils.py b/iib/common/pydantic_utils.py index 83951b87..9e5dabd7 100644 --- a/iib/common/pydantic_utils.py +++ b/iib/common/pydantic_utils.py @@ -14,18 +14,21 @@ # TODO add 
regex in future to not allow following values ":s", "s:", ":"? def image_format_check(image_name: str) -> str: + """Check format of the index image.""" if '@' not in image_name and ':' not in image_name: raise ValidationError(f'Image {image_name} should have a tag or a digest specified.') return image_name def images_format_check(image_list: List[str]) -> List[str]: + """Check multiple image names.""" for image_name in image_list: image_format_check(image_name) return image_list def get_unique_bundles(bundles: List[str]) -> List[str]: + """Check and possibly remove duplicates from a list of bundles.""" if not bundles: return bundles @@ -42,14 +45,15 @@ def get_unique_bundles(bundles: List[str]) -> List[str]: # RequestIndexImageMixin -def get_unique_deprecation_list_items(deprecation_list: Optional[List[str]]) -> Optional[List[str]]: +def get_unique_deprecation_list_items(deprecation_list: List[str]) -> List[str]: + """Return a list of unique items.""" return list(set(deprecation_list)) def validate_graph_mode_index_image( - graph_update_mode: str, - index_image: str, -) -> 'MergeIndexImageRequestPayload': + graph_update_mode: Optional[GRAPH_MODE_LITERAL], + index_image: Optional[str], +) -> Optional[str]: """ Validate graph mode and check if index image is allowed to use different graph mode. @@ -59,8 +63,7 @@ def validate_graph_mode_index_image( :raises: Forbidden when graph_mode can't be used for given index image """ if graph_update_mode: - # TODO remove this comment, replace value with current_app.config['IIB_GRAPH_MODE_INDEX_ALLOW_LIST'] - allowed_from_indexes: List[str] = ["REMOVE_#:r"] + allowed_from_indexes: List[str] = current_app.config['IIB_GRAPH_MODE_INDEX_ALLOW_LIST'] if index_image not in allowed_from_indexes: raise Forbidden( '"graph_update_mode" can only be used on the' @@ -70,18 +73,15 @@ def validate_graph_mode_index_image( # RequestIndexImageMixin -def from_index_add_arches(model: 'AddRequestPydanticModel') -> 'AddRequestPydanticModel': +def from_index_add_arches(from_index: Optional[str], add_arches: Optional[List[str]]) -> None: """Check if both `from_index` and `add_arches` are not specified.""" - if not model.from_index and not model.add_arches: + if not from_index and not add_arches: raise ValidationError('One of "from_index" or "add_arches" must be specified') - return model # RequestIndexImageMixin def binary_image_check(binary_image: str) -> str: - """ - # Validate binary_image is correctly provided. - """ + """Validate binary_image is correctly provided.""" if not binary_image and not current_app.config['IIB_BINARY_IMAGE_CONFIG']: raise ValidationError('The "binary_image" value must be a non-empty string') return binary_image @@ -93,9 +93,7 @@ def validate_overwrite_params( overwrite_index_image_token: Optional[str], disable_auth_check: Optional[bool] = False, ) -> None: - """ - Check if both `overwrite_index_image` and `overwrite_index_image_token` are specified. 
- """ + """Check if both `overwrite_index_image` and `overwrite_index_image_token` are specified.""" if overwrite_index_image_token and not overwrite_index_image: raise ValidationError( 'The "overwrite_from_index" parameter is required when' @@ -114,10 +112,12 @@ def validate_overwrite_params( # RequestIndexImageMixin def distribution_scope_lower(distribution_scope: str) -> str: + """Transform distribution_scope parameter to lowercase.""" return distribution_scope.lower() def length_validator(model_property: Any) -> Any: + """Validate length of the given model property.""" if len(model_property) == 0: raise ValidationError( f"The {type(model_property)} {model_property} should have at least 1 item." diff --git a/iib/web/iib_static_types.py b/iib/web/iib_static_types.py index f4682781..d13d193c 100644 --- a/iib/web/iib_static_types.py +++ b/iib/web/iib_static_types.py @@ -1,5 +1,5 @@ # SPDX-License-Identifier: GPL-3.0-or-later -from typing import Any, Dict, List, NamedTuple, Optional, Union, Sequence, Set +from typing import Any, Dict, List, NamedTuple, Optional, Sequence, Set from typing_extensions import NotRequired, TypedDict, Literal from proton._message import Message diff --git a/iib/workers/tasks/build.py b/iib/workers/tasks/build.py index 4531614c..f57e1741 100644 --- a/iib/workers/tasks/build.py +++ b/iib/workers/tasks/build.py @@ -291,16 +291,20 @@ def _update_index_image_pull_spec( else: index_image = output_pull_spec - payload: UpdateRequestPayload = {'arches': list(arches), 'index_image': index_image} + update_payload: UpdateRequestPayload = {'arches': list(arches), 'index_image': index_image} if add_or_rm: with set_registry_token(overwrite_from_index_token, from_index, append=True): index_image_resolved = get_resolved_image(index_image) - payload['index_image_resolved'] = index_image_resolved - payload['internal_index_image_copy'] = output_pull_spec - payload['internal_index_image_copy_resolved'] = get_resolved_image(output_pull_spec) + update_payload['index_image_resolved'] = index_image_resolved + update_payload['internal_index_image_copy'] = output_pull_spec + update_payload['internal_index_image_copy_resolved'] = get_resolved_image(output_pull_spec) - update_request(request_id, payload, exc_msg='Failed setting the index image on the request') + update_request( + request_id, + update_payload, + exc_msg='Failed setting the index image on the request', + ) def _get_external_arch_pull_spec( @@ -670,7 +674,7 @@ def _update_index_image_build_state( image. 
""" arches_str = ', '.join(sorted(prebuild_info['arches'])) - payload: UpdateRequestPayload = { + update_payload: UpdateRequestPayload = { 'binary_image': prebuild_info['binary_image'], 'binary_image_resolved': prebuild_info['binary_image_resolved'], 'state': 'in_progress', @@ -680,26 +684,26 @@ def _update_index_image_build_state( bundle_mapping: Optional[Dict[str, List[str]]] = prebuild_info.get('bundle_mapping') if bundle_mapping: - payload['bundle_mapping'] = bundle_mapping + update_payload['bundle_mapping'] = bundle_mapping from_index_resolved = prebuild_info.get('from_index_resolved') if from_index_resolved: - payload['from_index_resolved'] = from_index_resolved + update_payload['from_index_resolved'] = from_index_resolved source_from_index_resolved = prebuild_info.get('source_from_index_resolved') if source_from_index_resolved: - payload['source_from_index_resolved'] = source_from_index_resolved + update_payload['source_from_index_resolved'] = source_from_index_resolved target_index_resolved = prebuild_info.get('target_index_resolved') if target_index_resolved: - payload['target_index_resolved'] = target_index_resolved + update_payload['target_index_resolved'] = target_index_resolved fbc_fragment_resolved = prebuild_info.get('fbc_fragment_resolved') if fbc_fragment_resolved: - payload['fbc_fragment_resolved'] = fbc_fragment_resolved + update_payload['fbc_fragment_resolved'] = fbc_fragment_resolved exc_msg = 'Failed setting the resolved images on the request' - update_request(request_id, payload, exc_msg) + update_request(request_id, update_payload, exc_msg) @retry( diff --git a/iib/workers/tasks/build_recursive_related_bundles.py b/iib/workers/tasks/build_recursive_related_bundles.py index 4c83bb23..92d5919d 100644 --- a/iib/workers/tasks/build_recursive_related_bundles.py +++ b/iib/workers/tasks/build_recursive_related_bundles.py @@ -71,14 +71,14 @@ def handle_recursive_related_bundles_request( with set_registry_auths(payload.registry_auths): parent_bundle_image_resolved = get_resolved_image(payload.parent_bundle_image) - payload: UpdateRequestPayload = { + update_payload: UpdateRequestPayload = { 'parent_bundle_image_resolved': parent_bundle_image_resolved, 'state': 'in_progress', 'state_reason': ( f'Finding recursive related bundles for the bundle: {payload.parent_bundle_image}' ), } - update_request(request_id, payload) + update_request(request_id, update_payload) recursive_related_bundles = [parent_bundle_image_resolved] current_level_related_bundles = [parent_bundle_image_resolved] @@ -102,11 +102,15 @@ def handle_recursive_related_bundles_request( if not current_level_related_bundles: traversal_completed = True - payload = { + update_payload = { 'state': 'in_progress', 'state_reason': 'Writing recursive related bundles to a file', } - update_request(request_id, payload, exc_msg='Failed setting the bundle image on the request') + update_request( + request_id, + update_payload, + exc_msg='Failed setting the bundle image on the request', + ) # Reverse the list while writing because we did a top to bottom level traversal of a tree. # The return value should be a bottom to top level traversal. 
From 517dd5e99207acccf1e8c3cd6875c69052b955da Mon Sep 17 00:00:00 2001
From: xdaile
Date: Thu, 1 Feb 2024 20:14:48 +0100
Subject: [PATCH 6/6] Tests update

---
 iib/common/pydantic_models.py                 |  10 +-
 iib/common/pydantic_utils.py                  |   2 +-
 iib/web/models.py                             |   6 +-
 tests/test_web/test_migrations.py             | 149 +++++++++--------
 tests/test_workers/test_tasks/test_build.py   | 153 ++++++++++--------
 .../test_build_create_empty_index.py          |  23 ++-
 .../test_tasks/test_build_fbc_operations.py   |  11 +-
 7 files changed, 202 insertions(+), 152 deletions(-)

diff --git a/iib/common/pydantic_models.py b/iib/common/pydantic_models.py
index fbe525a6..f88deb4d 100644
--- a/iib/common/pydantic_models.py
+++ b/iib/common/pydantic_models.py
@@ -62,7 +62,7 @@ class AddPydanticModel(PydanticRequestBaseModel):
         AfterValidator(length_validator),
         AfterValidator(binary_image_check),
     ] = None
-    build_tags: Optional[List[str]] = []
+    build_tags: Optional[List[str]] = None
     bundles: Annotated[
         List[str],
         AfterValidator(length_validator),
@@ -156,7 +156,7 @@ class RmPydanticModel(PydanticRequestBaseModel):
         Optional[str],
         AfterValidator(binary_image_check),
     ] = None
-    build_tags: Optional[List[str]] = []
+    build_tags: Optional[List[str]] = None
     distribution_scope: Annotated[
         Optional[DISTRIBUTION_SCOPE_LITERAL],
         BeforeValidator(distribution_scope_lower),
     ] = None
@@ -256,7 +256,7 @@ class MergeIndexImagePydanticModel(PydanticRequestBaseModel):
         AfterValidator(image_format_check),
         AfterValidator(binary_image_check),
     ] = None
-    build_tags: Optional[List[str]] = []
+    build_tags: Optional[List[str]] = None
     deprecation_list: Annotated[
         Optional[List[str]],
         AfterValidator(get_unique_deprecation_list_items),
@@ -267,6 +267,7 @@ class MergeIndexImagePydanticModel(PydanticRequestBaseModel):
         BeforeValidator(distribution_scope_lower),
     ] = None
     graph_update_mode: Optional[GRAPH_MODE_LITERAL] = None
+    ignore_bundle_ocp_version: Optional[bool] = None
     overwrite_target_index: Optional[bool] = False
     overwrite_target_index_token: Optional[SecretStr] = None
     source_from_index: Annotated[str, AfterValidator(image_format_check)]
@@ -303,7 +304,6 @@ def _get_all_keys_to_check_in_db(self):
             "deprecation_list",
             "source_from_index",
             "target_index",
-            "target_index",
         ]
 
 
@@ -373,7 +373,7 @@ class FbcOperationsPydanticModel(PydanticRequestBaseModel):
         AfterValidator(get_unique_bundles),
         AfterValidator(images_format_check),
     ] = None
-    build_tags: Optional[List[str]] = []
+    build_tags: Optional[List[str]] = None
     distribution_scope: Annotated[
         Optional[DISTRIBUTION_SCOPE_LITERAL],
         BeforeValidator(distribution_scope_lower),
diff --git a/iib/common/pydantic_utils.py b/iib/common/pydantic_utils.py
index 9e5dabd7..61e76d9f 100644
--- a/iib/common/pydantic_utils.py
+++ b/iib/common/pydantic_utils.py
@@ -118,7 +118,7 @@ def distribution_scope_lower(distribution_scope: str) -> str:
 
 def length_validator(model_property: Any) -> Any:
     """Validate length of the given model property."""
-    if len(model_property) == 0:
+    if model_property is not None and len(model_property) == 0:
         raise ValidationError(
             f"The {type(model_property)} {model_property} should have at least 1 item."
         )
diff --git a/iib/web/models.py b/iib/web/models.py
index 02918cd8..11c0b021 100644
--- a/iib/web/models.py
+++ b/iib/web/models.py
@@ -540,9 +540,9 @@ def from_json_replacement(
         request_kwargs['batch'] = batch
 
         request = cls(**request_kwargs)
-
-        for bt in payload.build_tags:
-            request.add_build_tag(bt)
+        if payload.model_fields.get("build_tags") and payload.build_tags:
+            for bt in payload.build_tags:
+                request.add_build_tag(bt)
 
         request.add_state('in_progress', 'The request was initiated')
         return request
diff --git a/tests/test_web/test_migrations.py b/tests/test_web/test_migrations.py
index 85e38a16..d7abad13 100644
--- a/tests/test_web/test_migrations.py
+++ b/tests/test_web/test_migrations.py
@@ -5,13 +5,20 @@
 import pytest
 
 from iib.web.models import (
+    Request,
     RequestAdd,
     RequestMergeIndexImage,
     RequestRegenerateBundle,
     RequestRm,
     RequestCreateEmptyIndex,
 )
-
+from iib.common.pydantic_models import (
+    AddPydanticModel,
+    RmPydanticModel,
+    MergeIndexImagePydanticModel,
+    RegenerateBundlePydanticModel,
+    CreateEmptyIndexPydanticModel,
+)
 
 INITIAL_DB_REVISION = '274ba38408e8'
@@ -25,19 +32,19 @@ def test_migrate_to_polymorphic_requests(app, auth_env, client, db):
         for i in range(total_requests):
             request_class = random.choice((RequestAdd, RequestRm))
             if request_class == RequestAdd:
-                data = {
-                    'binary_image': 'quay.io/namespace/binary_image:latest',
-                    'bundles': [f'quay.io/namespace/bundle:{i}'],
-                    'from_index': f'quay.io/namespace/repo:{i}',
-                }
-                request = RequestAdd.from_json(data)
+                data = AddPydanticModel(
+                    binary_image='quay.io/namespace/binary_image:latest',
+                    bundles=[f'quay.io/namespace/bundle:{i}'],
+                    from_index=f'quay.io/namespace/repo:{i}',
+                )
+                request = RequestAdd.from_json_replacement(payload=data)
             elif request_class == RequestRm:
-                data = {
-                    'binary_image': 'quay.io/namespace/binary_image:latest',
-                    'operators': [f'operator-{i}'],
-                    'from_index': f'quay.io/namespace/repo:{i}',
-                }
-                request = RequestRm.from_json(data)
+                data = RmPydanticModel(
+                    binary_image='quay.io/namespace/binary_image:latest',
+                    operators=[f'operator-{i}'],
+                    from_index=f'quay.io/namespace/repo:{i}',
+                )
+                request = RequestRm.from_json_replacement(data)
 
             if i % 5 == 0:
                 # Simulate failed request
@@ -63,26 +70,26 @@ def test_migrate_to_merge_index_endpoints(app, auth_env, client, db):
         for i in range(total_requests):
             request_class = random.choice((RequestAdd, RequestMergeIndexImage, RequestRm))
             if request_class == RequestAdd:
-                data = {
-                    'binary_image': 'quay.io/namespace/binary_image:latest',
-                    'bundles': [f'quay.io/namespace/bundle:{i}'],
-                    'from_index': f'quay.io/namespace/repo:{i}',
-                }
-                request = RequestAdd.from_json(data)
+                data = AddPydanticModel(
+                    binary_image='quay.io/namespace/binary_image:latest',
+                    bundles=[f'quay.io/namespace/bundle:{i}'],
+                    from_index=f'quay.io/namespace/repo:{i}',
+                )
+                request = RequestAdd.from_json_replacement(data)
             elif request_class == RequestRm:
-                data = {
-                    'binary_image': 'quay.io/namespace/binary_image:latest',
-                    'operators': [f'operator-{i}'],
-                    'from_index': f'quay.io/namespace/repo:{i}',
-                }
-                request = RequestRm.from_json(data)
+                data = RmPydanticModel(
+                    binary_image='quay.io/namespace/binary_image:latest',
+                    operators=[f'operator-{i}'],
+                    from_index=f'quay.io/namespace/repo:{i}',
+                )
+                request = RequestRm.from_json_replacement(data)
             elif request_class == RequestMergeIndexImage:
-                data = {
-                    'source_from_index': f'quay.io/namespace/repo:{i}',
-                    'target_index': f'quay.io/namespace/repo:{i}',
-                    'binary_image': 'quay.io/namespace/binary_image:latest',
-                }
-                request = RequestMergeIndexImage.from_json(data)
+                data = MergeIndexImagePydanticModel(
+                    source_from_index=f'quay.io/namespace/repo:{i}',
+                    target_index=f'quay.io/namespace/repo:{i}',
+                    binary_image='quay.io/namespace/binary_image:latest',
+                )
+                request = RequestMergeIndexImage.from_json_replacement(data)
 
             if i % 5 == 0:
                 # Simulate failed request
@@ -104,31 +111,35 @@ def test_abort_when_downgrading_from_regenerate_bundle_request(app, auth_env, cl
     # flask_login.current_user is used in Request*.from_json which requires a request context
     with app.test_request_context(environ_base=auth_env):
         # Always add a RequestRegenerateBundle to ensure sufficient test data is available
-        data = {'from_bundle_image': 'quay.io/namespace/bundle-image:latest'}
-        request = RequestRegenerateBundle.from_json(data)
+        data = RegenerateBundlePydanticModel(
+            from_bundle_image='quay.io/namespace/bundle-image:latest'
+        )
+        request = RequestRegenerateBundle.from_json_replacement(data)
         db.session.add(request)
 
         # One request was already added, let's add the remaining ones
        for i in range(total_requests - 1):
            request_class = random.choice((RequestAdd, RequestRm, RequestRegenerateBundle))
            if request_class == RequestAdd:
-                data = {
-                    'binary_image': 'quay.io/namespace/binary_image:latest',
-                    'bundles': [f'quay.io/namespace/bundle:{i}'],
-                    'from_index': f'quay.io/namespace/repo:{i}',
-                }
-                request = RequestAdd.from_json(data)
+                data = AddPydanticModel(
+                    binary_image='quay.io/namespace/binary_image:latest',
+                    bundles=[f'quay.io/namespace/bundle:{i}'],
+                    from_index=f'quay.io/namespace/repo:{i}',
+                )
+                request = RequestAdd.from_json_replacement(data)
            elif request_class == RequestRm:
-                data = {
-                    'binary_image': 'quay.io/namespace/binary_image:latest',
-                    'operators': [f'operator-{i}'],
-                    'from_index': f'quay.io/namespace/repo:{i}',
-                }
-                request = RequestRm.from_json(data)
+                data = RmPydanticModel(
+                    binary_image='quay.io/namespace/binary_image:latest',
+                    operators=[f'operator-{i}'],
+                    from_index=f'quay.io/namespace/repo:{i}',
+                )
+                request = RequestRm.from_json_replacement(data)
            else:
-                data = {'from_bundle_image': 'quay.io/namespace/bundle-image:latest'}
-                request = RequestRegenerateBundle.from_json(data)
+                data = RegenerateBundlePydanticModel(
+                    from_bundle_image='quay.io/namespace/bundle-image:latest'
+                )
+                request = RequestRegenerateBundle.from_json_replacement(data)
            db.session.add(request)
 
     db.session.commit()
@@ -148,35 +159,35 @@ def test_create_empty_index_image_request(app, auth_env, client, db):
     # which requires a request context
     with app.test_request_context(environ_base=auth_env):
         # Generate some data to verify migration
-        data = {
-            'from_index': 'quay.io/namespace/index_image:latest',
-            'binary_image': 'quay.io/namespace/binary_image:latest',
-        }
-        request = RequestCreateEmptyIndex.from_json(data)
+        data = CreateEmptyIndexPydanticModel(
+            from_index='quay.io/namespace/index_image:latest',
+            binary_image='quay.io/namespace/binary_image:latest',
+        )
+        request = RequestCreateEmptyIndex.from_json_replacement(data)
         db.session.add(request)
 
         for i in range(total_requests):
             request_class = random.choice((RequestAdd, RequestRm, RequestCreateEmptyIndex))
             if request_class == RequestAdd:
-                data = {
-                    'binary_image': 'quay.io/namespace/binary_image:latest',
-                    'bundles': [f'quay.io/namespace/bundle:{i}'],
-                    'from_index': f'quay.io/namespace/repo:{i}',
-                }
-                request = RequestAdd.from_json(data)
+                data = AddPydanticModel(
+                    binary_image='quay.io/namespace/binary_image:latest',
+                    bundles=[f'quay.io/namespace/bundle:{i}'],
+                    from_index=f'quay.io/namespace/repo:{i}',
+                )
+                request = RequestAdd.from_json_replacement(data)
             elif request_class == RequestRm:
-                data = {
-                    'binary_image': 'quay.io/namespace/binary_image:latest',
-                    'operators': [f'operator-{i}'],
-                    'from_index': f'quay.io/namespace/repo:{i}',
-                }
-                request = RequestRm.from_json(data)
+                data = RmPydanticModel(
+                    binary_image='quay.io/namespace/binary_image:latest',
+                    operators=[f'operator-{i}'],
+                    from_index=f'quay.io/namespace/repo:{i}',
+                )
+                request = RequestRm.from_json_replacement(data)
             elif request_class == RequestCreateEmptyIndex:
-                data = {
-                    'from_index': f'quay.io/namespace/index_image:{i}',
-                    'binary_image': 'quay.io/namespace/binary_image:latest',
-                }
-                request = RequestCreateEmptyIndex.from_json(data)
+                data = CreateEmptyIndexPydanticModel(
+                    from_index=f'quay.io/namespace/index_image:{i}',
+                    binary_image='quay.io/namespace/binary_image:latest',
+                )
+                request = RequestCreateEmptyIndex.from_json_replacement(data)
 
             if i % 5 == 0:
                 # Simulate failed request
diff --git a/tests/test_workers/test_tasks/test_build.py b/tests/test_workers/test_tasks/test_build.py
index de178f14..e72866c6 100644
--- a/tests/test_workers/test_tasks/test_build.py
+++ b/tests/test_workers/test_tasks/test_build.py
@@ -12,6 +12,7 @@
 from iib.workers.tasks import build
 from iib.workers.tasks.utils import RequestConfigAddRm
 from iib.workers.config import get_worker_config
+from iib.common.pydantic_models import AddPydanticModel, RmPydanticModel
 from operator_manifest.operator import ImageName
 
 worker_config = get_worker_config()
@@ -622,7 +623,9 @@ def test_buildah_fail_max_retries(mock_run_cmd: mock.MagicMock) -> None:
 @mock.patch('iib.workers.tasks.build._get_present_bundles')
 @mock.patch('iib.workers.tasks.build.set_registry_token')
 @mock.patch('iib.workers.tasks.build.is_image_fbc')
+@mock.patch('iib.common.pydantic_models.binary_image_check')
 def test_handle_add_request(
+    mock_binary_image_check,
     mock_iifbc,
     mock_srt,
     mock_gpb,
@@ -692,24 +695,24 @@ def side_effect(*args, base_dir, **kwargs):
     mock_ors.return_value = (port, my_mock)
     mock_run_cmd.return_value = '{"packageName": "package1", "version": "v1.0", \
         "bundlePath": "bundle1"\n}'
-
-    build.handle_add_request(
-        bundles,
-        3,
-        binary_image,
-        'from-index:latest',
-        ['s390x'],
-        cnr_token,
-        organization,
-        force_backport,
-        False,
-        None,
-        None,
-        greenwave_config,
-        binary_image_config=binary_image_config,
+    add_pydantic_model = AddPydanticModel.model_construct(
+        bundles=bundles,
+        binary_image=binary_image,
+        from_index='from_index:latest',
+        cnr_token=cnr_token,
+        organization=organization,
+        force_backport=force_backport,
+        overwrite_from_index=False,
+        overwrite_from_index_token=None,
         deprecation_list=deprecation_list,
         build_tags=["extra_tag1", "extra_tag2"],
     )
+    build.handle_add_request(
+        payload=add_pydantic_model,
+        request_id=3,
+        greenwave_config=greenwave_config,
+        binary_image_config=binary_image_config,
+    )
 
     mock_ors.assert_called_once()
     mock_run_cmd.assert_called_once()
@@ -778,21 +781,24 @@ def side_effect(*args, base_dir, **kwargs):
 
 
 def test_handle_add_request_raises(mock_iifbc, mock_runcmd, mock_c):
     mock_iifbc.return_value = True
     with pytest.raises(IIBError):
-        build.handle_add_request(
+        add_pydantic_model = AddPydanticModel.model_construct(
             bundles=['some-bundle:2.3-1', 'some-deprecation-bundle:1.1-1'],
-            request_id=3,
             binary_image='binary-image:latest',
-            from_index='from-index:latest',
             add_arches=['s390x'],
+            from_index='from_index:latest',
             cnr_token='token',
             organization='org',
             force_backport=True,
             overwrite_from_index=False,
             overwrite_from_index_token=None,
             distribution_scope=None,
+            deprecation_list=[],
+        )
+        build.handle_add_request(
+            payload=add_pydantic_model,
+            request_id=3,
             greenwave_config={'some_key': 'other_value'},
             binary_image_config={'prod': {'v4.5': 'some_image'}},
-            deprecation_list=[],
         )
@@ -897,21 +903,24 @@ def deprecate_bundles_mock(*args, **kwargs):
     ]
     mock_sqlite.execute.return_value = 200
 
+    add_pydantic_model = AddPydanticModel.model_construct(
+        bundles=bundles,
+        binary_image='binary-image:latest',
+        add_arches=['s390x'],
+        from_index='from_index:latest',
+        cnr_token=cnr_token,
+        organization=organization,
+        force_backport=True,
+        overwrite_from_index=False,
+        overwrite_from_index_token=None,
+        distribution_scope=None,
+        deprecation_list=deprecation_list,
+    )
     build.handle_add_request(
-        bundles,
-        3,
-        'binary-image:latest',
-        'from-index:latest',
-        ['s390x'],
-        cnr_token,
-        organization,
-        True,
-        False,
-        None,
-        None,
-        greenwave_config,
+        payload=add_pydantic_model,
+        request_id=3,
+        greenwave_config=greenwave_config,
         binary_image_config=binary_image_config,
-        deprecation_list=deprecation_list,
     )
 
     mock_ors.assert_called_once()
@@ -983,19 +992,21 @@ def test_handle_add_request_gating_failure(
     organization = 'org'
     greenwave_config = {'some_key': 'other_value'}
     with pytest.raises(IIBError, match=error_msg):
+        add_pydantic_model = AddPydanticModel.model_construct(
+            bundles=bundles,
+            binary_image='binary-image:latest',
+            add_arches=['s390x'],
+            from_index='from_index:latest',
+            cnr_token=cnr_token,
+            organization=organization,
+            overwrite_from_index=False,
+            overwrite_from_index_token=None,
+            distribution_scope=None,
+        )
         build.handle_add_request(
-            bundles,
-            'binary-image:latest',
-            3,
-            'from-index:latest',
-            ['s390x'],
-            cnr_token,
-            organization,
-            None,
-            False,
-            None,
-            None,
-            greenwave_config,
+            payload=add_pydantic_model,
+            request_id=3,
+            greenwave_config=greenwave_config,
         )
     assert mock_cleanup.call_count == 1
     mock_srs2.assert_called_once()
@@ -1014,17 +1025,20 @@ def test_handle_add_request_bundle_resolution_failure(mock_grb, mock_srs, mock_c
     organization = 'org'
     greenwave_config = {'some_key': 'other_value'}
     with pytest.raises(IIBError, match=error_msg):
+        add_pydantic_model = AddPydanticModel.model_construct(
+            bundles=bundles,
+            binary_image='binary-image:latest',
+            add_arches=['s390x'],
+            from_index='from_index:latest',
+            cnr_token=cnr_token,
+            organization=organization,
+            force_backport=False,
+            overwrite_from_index=False,
+            overwrite_from_index_token=None,
+        )
         build.handle_add_request(
-            bundles,
-            'binary-image:latest',
-            3,
-            'from-index:latest',
-            ['s390x'],
-            cnr_token,
-            organization,
-            False,
-            False,
-            None,
+            payload=add_pydantic_model,
+            request_id=3,
             greenwave_config=greenwave_config,
         )
     assert mock_cleanup.call_count == 1
@@ -1073,11 +1087,14 @@ def test_handle_rm_request(
         'distribution_scope': 'PROD',
     }
     binary_image_config = {'prod': {'v4.6': 'some_image'}}
+    rm_pydantic_model = RmPydanticModel.model_construct(
+        operators=['some_operator'],
+        from_index='from-index:latest',
+        binary_image=binary_image,
+    )
     build.handle_rm_request(
-        ['some-operator'],
-        3,
-        'from-index:latest',
-        binary_image,
+        payload=rm_pydantic_model,
+        request_id=3,
         binary_image_config=binary_image_config,
     )
 
@@ -1162,11 +1179,14 @@ def test_handle_rm_request_fbc(
     mock_om.return_value = "/tmp/xyz/catalog"
     mock_orrf.return_value = "/tmp/fbc_dir", "/tmp/cache_dir"
     mock_gcd.return_value = "/some/path"
-    build.handle_rm_request(
-        operators=['some-operator'],
-        request_id=5,
+    rm_pydantic_model = RmPydanticModel.model_construct(
+        operators=['some_operator'],
         from_index='from-index:latest',
         binary_image='binary-image:latest',
+    )
+    build.handle_rm_request(
+        payload=rm_pydantic_model,
+        request_id=5,
         binary_image_config={'prod': {'v4.6': 'some_image'}},
     )
     mock_prfb.assert_called_once_with(
@@ -1446,9 +1466,8 @@ def test_handle_add_request_check_related_images_fail(
     mock_grb.return_value = ['some-bundle@sha256:123']
     mock_iri.side_effect = IIBError(error_msg)
     with pytest.raises(IIBError, match=re.escape(error_msg)):
-        build.handle_add_request(
+        add_pydantic_model = AddPydanticModel.model_construct(
             bundles=bundles,
-            request_id=3,
             binary_image='binary-image:latest',
             from_index='from-index:latest',
             add_arches=['s390x'],
@@ -1458,13 +1477,17 @@ def test_handle_add_request_check_related_images_fail(
             overwrite_from_index=False,
             overwrite_from_index_token=None,
             distribution_scope=None,
-            greenwave_config=None,
-            binary_image_config={'prod': {'v4.5': 'some_image'}},
             deprecation_list=[],
             build_tags=None,
             graph_update_mode=None,
             check_related_images=True,
         )
+        build.handle_add_request(
+            payload=add_pydantic_model,
+            request_id=3,
+            greenwave_config=None,
+            binary_image_config={'prod': {'v4.5': 'some_image'}},
+        )
     assert mock_cleanup.call_count == 1
     mock_srs.assert_called_once()
     mock_grb.assert_called_once_with(bundles)
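
A remark on the pattern used throughout the updated tests above, not part of the patch: `model_construct` is the pydantic v2 constructor that skips all field and model validators, which is presumably why the tests can hand deliberately unvalidated values straight to the task handlers. The sketch below is a self-contained illustration under that assumption; `ExampleModel` and its single validator are hypothetical, not IIB code.

    # Illustrative only: validated construction vs. model_construct (no validation).
    from typing import List, Optional

    from pydantic import BaseModel, field_validator


    class ExampleModel(BaseModel):
        from_index: str
        bundles: Optional[List[str]] = None

        @field_validator('from_index')
        @classmethod
        def looks_like_a_pull_spec(cls, value: str) -> str:
            if ':' not in value and '@' not in value:
                raise ValueError('expected a tag or digest in the pull spec')
            return value


    # ExampleModel(from_index='not-an-image')  # would raise a pydantic ValidationError
    unchecked = ExampleModel.model_construct(from_index='not-an-image', bundles=[])
    print(unchecked.from_index)  # prints: not-an-image
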
diff --git a/tests/test_workers/test_tasks/test_build_create_empty_index.py b/tests/test_workers/test_tasks/test_build_create_empty_index.py
index 5b06b07b..1b8f0c64 100644
--- a/tests/test_workers/test_tasks/test_build_create_empty_index.py
+++ b/tests/test_workers/test_tasks/test_build_create_empty_index.py
@@ -6,6 +6,7 @@
 from iib.exceptions import IIBError
 from iib.workers.tasks import build_create_empty_index
 from iib.workers.tasks.utils import RequestConfigCreateIndexImage
+from iib.common.pydantic_models import CreateEmptyIndexPydanticModel
 
 
 @mock.patch('iib.workers.tasks.build_create_empty_index.grpcurl_get_db_data')
@@ -81,12 +82,15 @@ def test_handle_create_empty_index_request(
     output_pull_spec = 'quay.io/namespace/some-image:3'
     mock_capml.return_value = output_pull_spec
 
-    build_create_empty_index.handle_create_empty_index_request(
+    create_empty_index_pydantic_model = CreateEmptyIndexPydanticModel.model_construct(
         from_index=from_index,
-        request_id=3,
         output_fbc=False,
         binary_image=binary_image,
         labels=labels,
+    )
+    build_create_empty_index.handle_create_empty_index_request(
+        payload=create_empty_index_pydantic_model,
+        request_id=3,
         binary_image_config=binary_image_config,
     )
 
@@ -141,14 +145,16 @@ def test_handle_create_empty_index_request_raises(mock_prfb, mock_iifbc, mock_c)
         IIBError, match=('Cannot create SQLite index image from File-Based Catalog index image')
     ):
         mock_iifbc.return_value = True
-        build_create_empty_index.handle_create_empty_index_request(
+
+        create_empty_index_pydantic_model = CreateEmptyIndexPydanticModel.model_construct(
             from_index=from_index,
-            request_id=3,
             output_fbc=False,
             binary_image=binary_image,
             labels={"version": "v4.5"},
+        )
+        build_create_empty_index.handle_create_empty_index_request(
+            payload=create_empty_index_pydantic_model,
+            request_id=3,
             binary_image_config={'prod': {'v4.5': 'some_image'}},
         )
 
@@ -194,12 +202,15 @@ def test_handle_create_empty_index_request_fbc(
     output_pull_spec = 'quay.io/namespace/some-image:3'
     mock_capml.return_value = output_pull_spec
 
-    build_create_empty_index.handle_create_empty_index_request(
+    create_empty_index_pydantic_model = CreateEmptyIndexPydanticModel.model_construct(
         from_index=from_index,
-        request_id=3,
         output_fbc=True,
         binary_image=binary_image,
         labels={"version": "v4.5"},
+    )
+    build_create_empty_index.handle_create_empty_index_request(
+        payload=create_empty_index_pydantic_model,
+        request_id=3,
         binary_image_config={'prod': {'v4.5': 'some_image'}},
     )
 
diff --git a/tests/test_workers/test_tasks/test_build_fbc_operations.py b/tests/test_workers/test_tasks/test_build_fbc_operations.py
index 3488fbc4..7b835f26 100644
--- a/tests/test_workers/test_tasks/test_build_fbc_operations.py
+++ b/tests/test_workers/test_tasks/test_build_fbc_operations.py
@@ -3,6 +3,7 @@
 
 from iib.workers.tasks import build_fbc_operations
 from iib.workers.tasks.utils import RequestConfigFBCOperation
+from iib.common.pydantic_models import FbcOperationsPydanticModel
 
 
 @mock.patch('iib.workers.tasks.build_fbc_operations._update_index_image_pull_spec')
@@ -48,11 +49,15 @@ def test_handle_fbc_operation_request(
     }
     mock_gri.return_value = 'fbc-fragment@sha256:qwerty'
 
-    build_fbc_operations.handle_fbc_operation_request(
-        request_id=request_id,
-        fbc_fragment=fbc_fragment,
+    fbc_operations_pydantic_model = FbcOperationsPydanticModel.model_construct(
         from_index=from_index,
         binary_image=binary_image,
+        fbc_fragment=fbc_fragment,
+
+    )
+    build_fbc_operations.handle_fbc_operation_request(
+        payload=fbc_operations_pydantic_model,
+        request_id=request_id,
         binary_image_config=binary_image_config,
     )
     mock_prfb.assert_called_once_with(